From cb155a11728984c49cfc0a33070bd3398ca7df59 Mon Sep 17 00:00:00 2001 From: Raj Kamal Singh <1133322+rkssisodiya@users.noreply.github.com> Date: Sat, 14 Oct 2023 09:16:14 +0530 Subject: [PATCH 01/23] feat: opamp server with agent config provider (#3737) * feat: add interface for opamp.AgentConfigProvider * feat: add iface and plumbing for generating recommended conf in opamp/agent * feat: get opamp server config provider tests started * chore: add test scenario for agent connection without a config recommendation * chore: add test scenario for agent connection with a config recommendation * chore: add test for validating config deployment status gets reported * chore: add test for rolling out latest config recommendations when config changes * chore: wrap up opamp server lifecycle tests * chore: some tests cleanup * chore: get all tests passing * chore: update opamp server init logic in ee query service * chore: some cleanup * chore: some final cleanup --- ee/query-service/app/server.go | 12 +- .../app/opamp/config_provider.go | 12 + .../app/opamp/config_provider_test.go | 256 ++++++++++++++++++ pkg/query-service/app/opamp/mocks.go | 134 +++++++++ pkg/query-service/app/opamp/model/agent.go | 63 +++-- pkg/query-service/app/opamp/model/agents.go | 45 +++ pkg/query-service/app/opamp/model/config.go | 20 ++ .../app/opamp/model/constants.go | 4 + pkg/query-service/app/opamp/opamp_server.go | 46 +++- pkg/query-service/app/server.go | 13 +- .../integration/logparsingpipeline_test.go | 46 +--- 11 files changed, 575 insertions(+), 76 deletions(-) create mode 100644 pkg/query-service/app/opamp/config_provider.go create mode 100644 pkg/query-service/app/opamp/config_provider_test.go create mode 100644 pkg/query-service/app/opamp/mocks.go create mode 100644 pkg/query-service/app/opamp/model/config.go create mode 100644 pkg/query-service/app/opamp/model/constants.go diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go index 834575643b..5dda1e7237 100644 --- a/ee/query-service/app/server.go +++ b/ee/query-service/app/server.go @@ -88,6 +88,8 @@ type Server struct { // Usage manager usageManager *usage.Manager + opampServer *opamp.Server + unavailableChannel chan healthcheck.Status } @@ -254,6 +256,12 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { s.privateHTTP = privateServer + // TODO(Raj): Replace this with actual provider in a follow up PR + agentConfigProvider := opamp.NewMockAgentConfigProvider() + s.opampServer = opamp.InitializeServer( + &opAmpModel.AllAgents, agentConfigProvider, + ) + return s, nil } @@ -569,7 +577,7 @@ func (s *Server) Start() error { go func() { zap.S().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint)) - err := opamp.InitializeAndStartServer(baseconst.OpAmpWsEndpoint, &opAmpModel.AllAgents) + err := s.opampServer.Start(baseconst.OpAmpWsEndpoint) if err != nil { zap.S().Info("opamp ws server failed to start", err) s.unavailableChannel <- healthcheck.Unavailable @@ -592,7 +600,7 @@ func (s *Server) Stop() error { } } - opamp.StopServer() + s.opampServer.Stop() if s.ruleManager != nil { s.ruleManager.Stop() diff --git a/pkg/query-service/app/opamp/config_provider.go b/pkg/query-service/app/opamp/config_provider.go new file mode 100644 index 0000000000..0978890cb1 --- /dev/null +++ b/pkg/query-service/app/opamp/config_provider.go @@ -0,0 +1,12 @@ +package opamp + +import "go.signoz.io/signoz/pkg/query-service/app/opamp/model" + +// Interface for a source of otel collector config recommendations. 
+type AgentConfigProvider interface { + model.AgentConfigProvider + + // Subscribe to be notified on changes in config provided by this source. + // Used for rolling out latest config recommendation to all connected agents when settings change + SubscribeToConfigUpdates(callback func()) (unsubscribe func()) +} diff --git a/pkg/query-service/app/opamp/config_provider_test.go b/pkg/query-service/app/opamp/config_provider_test.go new file mode 100644 index 0000000000..083c396a41 --- /dev/null +++ b/pkg/query-service/app/opamp/config_provider_test.go @@ -0,0 +1,256 @@ +package opamp + +import ( + "fmt" + "log" + "net" + "os" + "testing" + + "github.com/knadh/koanf" + "github.com/knadh/koanf/parsers/yaml" + "github.com/knadh/koanf/providers/rawbytes" + _ "github.com/mattn/go-sqlite3" + "github.com/open-telemetry/opamp-go/protobufs" + "github.com/pkg/errors" + "github.com/stretchr/testify/require" + "go.signoz.io/signoz/pkg/query-service/app/opamp/model" + "golang.org/x/exp/maps" +) + +func TestOpAMPServerToAgentCommunicationWithConfigProvider(t *testing.T) { + require := require.New(t) + + tb := newTestbed(t) + + require.Equal( + 0, len(tb.testConfigProvider.ConfigUpdateSubscribers), + "there should be no agent config subscribers at the start", + ) + tb.StartServer() + require.Equal( + 1, len(tb.testConfigProvider.ConfigUpdateSubscribers), + "Opamp server should have subscribed to updates from config provider after being started", + ) + + // Server should always respond with a RemoteConfig when an agent connects. + // Even if there are no recommended changes to the agent's initial config + require.False(tb.testConfigProvider.HasRecommendations()) + agent1Conn := &MockOpAmpConnection{} + agent1Id := "testAgent1" + tb.opampServer.OnMessage( + agent1Conn, + &protobufs.AgentToServer{ + InstanceUid: agent1Id, + EffectiveConfig: &protobufs.EffectiveConfig{ + ConfigMap: initialAgentConf(), + }, + }, + ) + lastAgent1Msg := agent1Conn.LatestMsgFromServer() + require.NotNil( + lastAgent1Msg, + "Server should always send a remote config to the agent when it connects", + ) + require.Equal( + RemoteConfigBody(lastAgent1Msg), + string(initialAgentConf().ConfigMap[model.CollectorConfigFilename].Body), + ) + + tb.testConfigProvider.ZPagesEndpoint = "localhost:55555" + require.True(tb.testConfigProvider.HasRecommendations()) + agent2Id := "testAgent2" + agent2Conn := &MockOpAmpConnection{} + tb.opampServer.OnMessage( + agent2Conn, + &protobufs.AgentToServer{ + InstanceUid: agent2Id, + EffectiveConfig: &protobufs.EffectiveConfig{ + ConfigMap: initialAgentConf(), + }, + }, + ) + lastAgent2Msg := agent2Conn.LatestMsgFromServer() + require.NotNil( + lastAgent2Msg, + "server should recommend a config to agent when it connects", + ) + + recommendedEndpoint, err := GetStringValueFromYaml( + []byte(RemoteConfigBody(lastAgent2Msg)), "extensions.zpages.endpoint", + ) + require.Nil(err) + require.Equal( + tb.testConfigProvider.ZPagesEndpoint, recommendedEndpoint, + "server should send recommended config to agent when it connects", + ) + + agent2Conn.ClearMsgsFromServer() + tb.opampServer.OnMessage(agent2Conn, &protobufs.AgentToServer{ + InstanceUid: agent2Id, + EffectiveConfig: &protobufs.EffectiveConfig{ + ConfigMap: NewAgentConfigMap( + []byte(RemoteConfigBody(lastAgent2Msg)), + ), + }, + RemoteConfigStatus: &protobufs.RemoteConfigStatus{ + Status: protobufs.RemoteConfigStatuses_RemoteConfigStatuses_APPLIED, + LastRemoteConfigHash: lastAgent2Msg.RemoteConfig.ConfigHash, + }, + }) + expectedConfId := 
tb.testConfigProvider.ZPagesEndpoint + require.True(tb.testConfigProvider.HasReportedDeploymentStatus(expectedConfId, agent2Id), + "Server should report deployment success to config provider on receiving update from agent.", + ) + require.True(tb.testConfigProvider.ReportedDeploymentStatuses[expectedConfId][agent2Id]) + require.Nil( + agent2Conn.LatestMsgFromServer(), + "Server should not recommend a RemoteConfig if agent is already running it.", + ) + + // Server should rollout latest config to all agents when notified of a change by config provider + agent1Conn.ClearMsgsFromServer() + agent2Conn.ClearMsgsFromServer() + tb.testConfigProvider.ZPagesEndpoint = "localhost:66666" + tb.testConfigProvider.NotifySubscribersOfChange() + for _, agentConn := range []*MockOpAmpConnection{agent1Conn, agent2Conn} { + lastMsg := agentConn.LatestMsgFromServer() + + recommendedEndpoint, err := GetStringValueFromYaml( + []byte(RemoteConfigBody(lastMsg)), "extensions.zpages.endpoint", + ) + require.Nil(err) + require.Equal(tb.testConfigProvider.ZPagesEndpoint, recommendedEndpoint) + } + + lastAgent2Msg = agent2Conn.LatestMsgFromServer() + tb.opampServer.OnMessage(agent2Conn, &protobufs.AgentToServer{ + InstanceUid: agent2Id, + RemoteConfigStatus: &protobufs.RemoteConfigStatus{ + Status: protobufs.RemoteConfigStatuses_RemoteConfigStatuses_FAILED, + LastRemoteConfigHash: lastAgent2Msg.RemoteConfig.ConfigHash, + }, + }) + expectedConfId = tb.testConfigProvider.ZPagesEndpoint + require.True(tb.testConfigProvider.HasReportedDeploymentStatus(expectedConfId, agent2Id), + "Server should report deployment failure to config provider on receiving update from agent.", + ) + require.False(tb.testConfigProvider.ReportedDeploymentStatuses[expectedConfId][agent2Id]) + + require.Equal(1, len(tb.testConfigProvider.ConfigUpdateSubscribers)) + tb.opampServer.Stop() + require.Equal( + 0, len(tb.testConfigProvider.ConfigUpdateSubscribers), + "Opamp server should have unsubscribed to config provider updates after shutdown", + ) +} + +type testbed struct { + testConfigProvider *MockAgentConfigProvider + opampServer *Server + t *testing.T +} + +func newTestbed(t *testing.T) *testbed { + // Init opamp model. 
+ testDBFile, err := os.CreateTemp("", "test-signoz-db-*") + if err != nil { + t.Fatalf("could not create temp file for test db: %v", err) + } + testDBFilePath := testDBFile.Name() + t.Cleanup(func() { os.Remove(testDBFilePath) }) + testDBFile.Close() + + _, err = model.InitDB(testDBFilePath) + if err != nil { + t.Fatalf("could not init opamp model: %v", err) + } + + testConfigProvider := NewMockAgentConfigProvider() + opampServer := InitializeServer(nil, testConfigProvider) + + return &testbed{ + testConfigProvider: testConfigProvider, + opampServer: opampServer, + t: t, + } +} + +func (tb *testbed) StartServer() { + testListenPath := GetAvailableLocalAddress() + err := tb.opampServer.Start(testListenPath) + require.Nil(tb.t, err, "should be able to start opamp server") +} + +// Test helper +func GetStringValueFromYaml( + serializedYaml []byte, path string, +) (string, error) { + if len(serializedYaml) < 1 { + return "", fmt.Errorf("yaml data is empty") + } + + k := koanf.New(".") + err := k.Load(rawbytes.Provider(serializedYaml), yaml.Parser()) + if err != nil { + return "", errors.Wrap(err, "could not unmarshal collector config") + } + + return k.String("extensions.zpages.endpoint"), nil +} + +// Returns body of a ServerToAgent.RemoteConfig or "" +func RemoteConfigBody(msg *protobufs.ServerToAgent) string { + if msg == nil { + return "" + } + + collectorConfFiles := msg.RemoteConfig.Config.ConfigMap + if len(collectorConfFiles) < 1 { + return "" + } + return string(maps.Values(collectorConfFiles)[0].Body) +} + +func NewAgentConfigMap(body []byte) *protobufs.AgentConfigMap { + return &protobufs.AgentConfigMap{ + ConfigMap: map[string]*protobufs.AgentConfigFile{ + model.CollectorConfigFilename: { + Body: body, + ContentType: "text/yaml", + }, + }, + } + +} + +func initialAgentConf() *protobufs.AgentConfigMap { + return NewAgentConfigMap( + []byte(` + receivers: + otlp: + processors: + batch: + exporters: + otlp: + service: + pipelines: + logs: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + `), + ) +} + +// Brought in from https://github.com/open-telemetry/opamp-go/blob/main/internal/testhelpers/nethelpers.go +func GetAvailableLocalAddress() string { + ln, err := net.Listen("tcp", "127.0.0.1:") + if err != nil { + log.Fatalf("failed to get a free local port: %v", err) + } + // There is a possible race if something else takes this same port before + // the test uses it, however, that is unlikely in practice. 
+ defer ln.Close() + return ln.Addr().String() +} diff --git a/pkg/query-service/app/opamp/mocks.go b/pkg/query-service/app/opamp/mocks.go new file mode 100644 index 0000000000..705fe38bcf --- /dev/null +++ b/pkg/query-service/app/opamp/mocks.go @@ -0,0 +1,134 @@ +package opamp + +import ( + "context" + "net" + + "github.com/google/uuid" + "github.com/knadh/koanf" + "github.com/knadh/koanf/parsers/yaml" + "github.com/knadh/koanf/providers/rawbytes" + "github.com/open-telemetry/opamp-go/protobufs" + "github.com/pkg/errors" +) + +type MockOpAmpConnection struct { + ServerToAgentMsgs []*protobufs.ServerToAgent +} + +func (conn *MockOpAmpConnection) Send(ctx context.Context, msg *protobufs.ServerToAgent) error { + conn.ServerToAgentMsgs = append(conn.ServerToAgentMsgs, msg) + return nil +} + +func (conn *MockOpAmpConnection) LatestMsgFromServer() *protobufs.ServerToAgent { + if len(conn.ServerToAgentMsgs) < 1 { + return nil + } + return conn.ServerToAgentMsgs[len(conn.ServerToAgentMsgs)-1] +} + +func (conn *MockOpAmpConnection) ClearMsgsFromServer() []*protobufs.ServerToAgent { + msgs := conn.ServerToAgentMsgs + conn.ServerToAgentMsgs = []*protobufs.ServerToAgent{} + return msgs +} + +func (conn *MockOpAmpConnection) Disconnect() error { + return nil +} +func (conn *MockOpAmpConnection) RemoteAddr() net.Addr { + return nil +} + +// Implements opamp.AgentConfigProvider +type MockAgentConfigProvider struct { + // An updated config is recommended by TestAgentConfProvider + // if `ZPagesEndpoint` is not empty + ZPagesEndpoint string + + ConfigUpdateSubscribers map[string]func() + + // { configId: { agentId: isOk } } + ReportedDeploymentStatuses map[string]map[string]bool +} + +func NewMockAgentConfigProvider() *MockAgentConfigProvider { + return &MockAgentConfigProvider{ + ConfigUpdateSubscribers: map[string]func(){}, + ReportedDeploymentStatuses: map[string]map[string]bool{}, + } +} + +// Test helper. +func (ta *MockAgentConfigProvider) HasRecommendations() bool { + return len(ta.ZPagesEndpoint) > 0 +} + +// AgentConfigProvider interface +func (ta *MockAgentConfigProvider) RecommendAgentConfig(baseConfYaml []byte) ( + []byte, string, error, +) { + if len(ta.ZPagesEndpoint) < 1 { + return baseConfYaml, "agent-base-config", nil + } + + k := koanf.New(".") + err := k.Load(rawbytes.Provider(baseConfYaml), yaml.Parser()) + if err != nil { + return nil, "", errors.Wrap(err, "could not unmarshal baseConf") + } + + k.Set("extensions.zpages.endpoint", ta.ZPagesEndpoint) + recommendedYaml, err := k.Marshal(yaml.Parser()) + if err != nil { + return nil, "", errors.Wrap(err, "could not marshal recommended conf") + } + + confId := ta.ZPagesEndpoint + return recommendedYaml, confId, nil +} + +// AgentConfigProvider interface +func (ta *MockAgentConfigProvider) ReportConfigDeploymentStatus( + agentId string, + configId string, + err error, +) { + confIdReports := ta.ReportedDeploymentStatuses[configId] + if confIdReports == nil { + confIdReports = map[string]bool{} + ta.ReportedDeploymentStatuses[configId] = confIdReports + } + + confIdReports[agentId] = (err == nil) +} + +// Test helper. 
+func (ta *MockAgentConfigProvider) HasReportedDeploymentStatus( + configId string, agentId string, +) bool { + confIdReports := ta.ReportedDeploymentStatuses[configId] + if confIdReports == nil { + return false + } + _, exists := confIdReports[agentId] + return exists +} + +// AgentConfigProvider interface +func (ta *MockAgentConfigProvider) SubscribeToConfigUpdates(callback func()) func() { + subscriberId := uuid.NewString() + ta.ConfigUpdateSubscribers[subscriberId] = callback + + return func() { + delete(ta.ConfigUpdateSubscribers, subscriberId) + } +} + +// test helper. +func (ta *MockAgentConfigProvider) NotifySubscribersOfChange() { + for _, callback := range ta.ConfigUpdateSubscribers { + callback() + } +} diff --git a/pkg/query-service/app/opamp/model/agent.go b/pkg/query-service/app/opamp/model/agent.go index ba2ecfcddc..a6f9dd66ef 100644 --- a/pkg/query-service/app/opamp/model/agent.go +++ b/pkg/query-service/app/opamp/model/agent.go @@ -7,6 +7,7 @@ import ( "sync" "time" + "go.uber.org/zap" "google.golang.org/protobuf/proto" "github.com/open-telemetry/opamp-go/protobufs" @@ -72,12 +73,6 @@ func (agent *Agent) Upsert() error { return nil } -func (agent *Agent) UpdateStatus(statusMsg *protobufs.AgentToServer, response *protobufs.ServerToAgent) { - agent.mux.Lock() - defer agent.mux.Unlock() - agent.processStatusUpdate(statusMsg, response) -} - // extracts lb exporter support flag from agent description. the flag // is used to decide if lb exporter can be enabled on the agent. func ExtractLbFlag(agentDescr *protobufs.AgentDescription) bool { @@ -208,9 +203,20 @@ func (agent *Agent) hasCapability(capability protobufs.AgentCapabilities) bool { return agent.Status.Capabilities&uint64(capability) != 0 } +func (agent *Agent) UpdateStatus( + statusMsg *protobufs.AgentToServer, + response *protobufs.ServerToAgent, + configProvider AgentConfigProvider, +) { + agent.mux.Lock() + defer agent.mux.Unlock() + agent.processStatusUpdate(statusMsg, response, configProvider) +} + func (agent *Agent) processStatusUpdate( newStatus *protobufs.AgentToServer, response *protobufs.ServerToAgent, + configProvider AgentConfigProvider, ) { // We don't have any status for this Agent, or we lost the previous status update from the Agent, so our // current status is not up-to-date. @@ -237,12 +243,16 @@ func (agent *Agent) processStatusUpdate( response.Flags |= uint64(protobufs.ServerToAgentFlags_ServerToAgentFlags_ReportFullState) } + // This needs to be done before agent.updateRemoteConfig() to ensure it sees + // the latest value for agent.EffectiveConfig when generating a config recommendation + agent.updateEffectiveConfig(newStatus, response) + configChanged := false if agentDescrChanged { // Agent description is changed. // We need to recalculate the config. - configChanged = agent.updateRemoteConfig() + configChanged = agent.updateRemoteConfig(configProvider) } // If remote config is changed and different from what the Agent has then @@ -254,13 +264,21 @@ func (agent *Agent) processStatusUpdate( // does not have this config (hash is different). Send the new config the Agent. 
response.RemoteConfig = agent.remoteConfig agent.SendToAgent(response) - } - agent.updateEffectiveConfig(newStatus, response) + ListenToConfigUpdate( + agent.ID, + string(response.RemoteConfig.ConfigHash), + configProvider.ReportConfigDeploymentStatus, + ) + } } -func (agent *Agent) updateRemoteConfig() bool { - hash := sha256.New() +func (agent *Agent) updateRemoteConfig(configProvider AgentConfigProvider) bool { + recommendedConfig, confId, err := configProvider.RecommendAgentConfig([]byte(agent.EffectiveConfig)) + if err != nil { + zap.S().Errorf("could not generate config recommendation for agent %d: %w", agent.ID, err) + return false + } cfg := protobufs.AgentRemoteConfig{ Config: &protobufs.AgentConfigMap{ @@ -268,14 +286,25 @@ func (agent *Agent) updateRemoteConfig() bool { }, } - // Calculate the hash. - for k, v := range cfg.Config.ConfigMap { - hash.Write([]byte(k)) - hash.Write(v.Body) - hash.Write([]byte(v.ContentType)) + cfg.Config.ConfigMap[CollectorConfigFilename] = &protobufs.AgentConfigFile{ + Body: recommendedConfig, + ContentType: "application/x-yaml", } - cfg.ConfigHash = hash.Sum(nil) + if len(confId) < 1 { + // Should never happen. Handle gracefully if it does by some chance. + zap.S().Errorf("config provider recommended a config with empty confId. Using content hash for configId") + + hash := sha256.New() + for k, v := range cfg.Config.ConfigMap { + hash.Write([]byte(k)) + hash.Write(v.Body) + hash.Write([]byte(v.ContentType)) + } + cfg.ConfigHash = hash.Sum(nil) + } else { + cfg.ConfigHash = []byte(confId) + } configChanged := !isEqualRemoteConfig(agent.remoteConfig, &cfg) diff --git a/pkg/query-service/app/opamp/model/agents.go b/pkg/query-service/app/opamp/model/agents.go index 18faddb48b..e835ee8ccc 100644 --- a/pkg/query-service/app/opamp/model/agents.go +++ b/pkg/query-service/app/opamp/model/agents.go @@ -6,7 +6,10 @@ import ( "time" "github.com/jmoiron/sqlx" + "github.com/open-telemetry/opamp-go/protobufs" "github.com/open-telemetry/opamp-go/server/types" + "github.com/pkg/errors" + "go.uber.org/zap" ) var db *sqlx.DB @@ -115,3 +118,45 @@ func (agents *Agents) GetAllAgents() []*Agent { } return allAgents } + +// Recommend latest config to connected agents whose effective +// config is not the same as the latest recommendation +func (agents *Agents) RecommendLatestConfigToAll( + provider AgentConfigProvider, +) error { + for _, agent := range agents.GetAllAgents() { + newConfig, confId, err := provider.RecommendAgentConfig( + []byte(agent.EffectiveConfig), + ) + if err != nil { + return errors.Wrap(err, fmt.Sprintf( + "could not generate conf recommendation for %v", agent.ID, + )) + } + + // Recommendation is same as current config + if string(newConfig) == agent.EffectiveConfig { + zap.S().Infof( + "Recommended config same as current effective config for agent %s", agent.ID, + ) + return nil + } + + agent.SendToAgent(&protobufs.ServerToAgent{ + RemoteConfig: &protobufs.AgentRemoteConfig{ + Config: &protobufs.AgentConfigMap{ + ConfigMap: map[string]*protobufs.AgentConfigFile{ + CollectorConfigFilename: { + Body: newConfig, + ContentType: "application/x-yaml", + }, + }, + }, + ConfigHash: []byte(confId), + }, + }) + + ListenToConfigUpdate(agent.ID, confId, provider.ReportConfigDeploymentStatus) + } + return nil +} diff --git a/pkg/query-service/app/opamp/model/config.go b/pkg/query-service/app/opamp/model/config.go new file mode 100644 index 0000000000..026ef947c3 --- /dev/null +++ b/pkg/query-service/app/opamp/model/config.go @@ -0,0 +1,20 @@ +package model + 
+// Interface for source of otel collector config recommendations. +type AgentConfigProvider interface { + // Generate recommended config for an agent based on its `currentConfYaml` + // and current state of user facing settings for agent based features. + RecommendAgentConfig(currentConfYaml []byte) ( + recommendedConfYaml []byte, + // Opaque id of the recommended config, used for reporting deployment status updates + configId string, + err error, + ) + + // Report deployment status for config recommendations generated by RecommendAgentConfig + ReportConfigDeploymentStatus( + agentId string, + configId string, + err error, + ) +} diff --git a/pkg/query-service/app/opamp/model/constants.go b/pkg/query-service/app/opamp/model/constants.go new file mode 100644 index 0000000000..293922c424 --- /dev/null +++ b/pkg/query-service/app/opamp/model/constants.go @@ -0,0 +1,4 @@ +package model + +// Must match collectorConfigKey in https://github.com/SigNoz/signoz-otel-collector/blob/main/opamp/config_manager.go +const CollectorConfigFilename = "collector.yaml" diff --git a/pkg/query-service/app/opamp/opamp_server.go b/pkg/query-service/app/opamp/opamp_server.go index 201fd598c7..2a7ba4c6fa 100644 --- a/pkg/query-service/app/opamp/opamp_server.go +++ b/pkg/query-service/app/opamp/opamp_server.go @@ -18,35 +18,32 @@ type Server struct { agents *model.Agents logger *zap.Logger capabilities int32 + + agentConfigProvider AgentConfigProvider + + // cleanups to be run when stopping the server + cleanups []func() } const capabilities = protobufs.ServerCapabilities_ServerCapabilities_AcceptsEffectiveConfig | protobufs.ServerCapabilities_ServerCapabilities_OffersRemoteConfig | protobufs.ServerCapabilities_ServerCapabilities_AcceptsStatus -func InitializeServer(listener string, agents *model.Agents) *Server { +func InitializeServer( + agents *model.Agents, agentConfigProvider AgentConfigProvider, +) *Server { if agents == nil { agents = &model.AllAgents } opAmpServer = &Server{ - agents: agents, + agents: agents, + agentConfigProvider: agentConfigProvider, } opAmpServer.server = server.New(zap.S()) return opAmpServer } -func InitializeAndStartServer(listener string, agents *model.Agents) error { - InitializeServer(listener, agents) - return opAmpServer.Start(listener) -} - -func StopServer() { - if opAmpServer != nil { - opAmpServer.Stop() - } -} - func (srv *Server) Start(listener string) error { settings := server.StartSettings{ Settings: server.Settings{ @@ -58,10 +55,24 @@ func (srv *Server) Start(listener string) error { ListenEndpoint: listener, } + unsubscribe := srv.agentConfigProvider.SubscribeToConfigUpdates(func() { + err := srv.agents.RecommendLatestConfigToAll(srv.agentConfigProvider) + if err != nil { + zap.S().Errorf( + "could not roll out latest config recommendation to connected agents: %w", err, + ) + } + }) + srv.cleanups = append(srv.cleanups, unsubscribe) + return srv.server.Start(settings) } func (srv *Server) Stop() { + for _, cleanup := range srv.cleanups { + defer cleanup() + } + srv.server.Stop(context.Background()) } @@ -80,7 +91,12 @@ func (srv *Server) OnMessage(conn types.Connection, msg *protobufs.AgentToServer if created { agent.CanLB = model.ExtractLbFlag(msg.AgentDescription) - zap.S().Debugf("New agent added:", zap.Bool("canLb", agent.CanLB), zap.String("ID", agent.ID), zap.Any("status", agent.CurrentStatus)) + zap.S().Debugf( + "New agent added:", + zap.Bool("canLb", agent.CanLB), + zap.String("ID", agent.ID), + zap.Any("status", agent.CurrentStatus), + ) } var response 
*protobufs.ServerToAgent @@ -89,7 +105,7 @@ func (srv *Server) OnMessage(conn types.Connection, msg *protobufs.AgentToServer Capabilities: uint64(capabilities), } - agent.UpdateStatus(msg, response) + agent.UpdateStatus(msg, response, srv.agentConfigProvider) return response } diff --git a/pkg/query-service/app/server.go b/pkg/query-service/app/server.go index 69f3a9367f..6e3f267491 100644 --- a/pkg/query-service/app/server.go +++ b/pkg/query-service/app/server.go @@ -76,6 +76,8 @@ type Server struct { privateConn net.Listener privateHTTP *http.Server + opampServer *opamp.Server + unavailableChannel chan healthcheck.Status } @@ -204,6 +206,13 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { if err := agentConf.Initiate(localDB, "sqlite"); err != nil { return nil, err } + + // TODO(Raj): Replace this with actual provider in a follow up PR + agentConfigProvider := opamp.NewMockAgentConfigProvider() + s.opampServer = opamp.InitializeServer( + &opAmpModel.AllAgents, agentConfigProvider, + ) + return s, nil } @@ -503,7 +512,7 @@ func (s *Server) Start() error { go func() { zap.S().Info("Starting OpAmp Websocket server", zap.String("addr", constants.OpAmpWsEndpoint)) - err := opamp.InitializeAndStartServer(constants.OpAmpWsEndpoint, &opAmpModel.AllAgents) + err := s.opampServer.Start(constants.OpAmpWsEndpoint) if err != nil { zap.S().Info("opamp ws server failed to start", err) s.unavailableChannel <- healthcheck.Unavailable @@ -526,7 +535,7 @@ func (s *Server) Stop() error { } } - opamp.StopServer() + s.opampServer.Stop() if s.ruleManager != nil { s.ruleManager.Stop() diff --git a/pkg/query-service/tests/integration/logparsingpipeline_test.go b/pkg/query-service/tests/integration/logparsingpipeline_test.go index 21bc02b1f8..0a1105e63c 100644 --- a/pkg/query-service/tests/integration/logparsingpipeline_test.go +++ b/pkg/query-service/tests/integration/logparsingpipeline_test.go @@ -6,7 +6,6 @@ import ( "encoding/json" "fmt" "io" - "net" "net/http" "net/http/httptest" "os" @@ -299,7 +298,7 @@ type LogPipelinesTestBed struct { testUser *model.User apiHandler *app.APIHandler opampServer *opamp.Server - opampClientConn *mockOpAmpConnection + opampClientConn *opamp.MockOpAmpConnection } func NewLogPipelinesTestBed(t *testing.T) *LogPipelinesTestBed { @@ -447,7 +446,7 @@ func (tb *LogPipelinesTestBed) GetPipelinesFromQS() *logparsingpipeline.Pipeline func (tb *LogPipelinesTestBed) assertPipelinesSentToOpampClient( pipelines []logparsingpipeline.Pipeline, ) { - lastMsg := tb.opampClientConn.latestMsgFromServer() + lastMsg := tb.opampClientConn.LatestMsgFromServer() collectorConfigFiles := lastMsg.RemoteConfig.Config.ConfigMap assert.Equal( tb.t, len(collectorConfigFiles), 1, @@ -516,7 +515,7 @@ func (tb *LogPipelinesTestBed) assertPipelinesSentToOpampClient( } func (tb *LogPipelinesTestBed) simulateOpampClientAcknowledgementForLatestConfig() { - lastMsg := tb.opampClientConn.latestMsgFromServer() + lastMsg := tb.opampClientConn.LatestMsgFromServer() tb.opampServer.OnMessage(tb.opampClientConn, &protobufs.AgentToServer{ InstanceUid: "test", EffectiveConfig: &protobufs.EffectiveConfig{ @@ -564,7 +563,7 @@ func assertPipelinesResponseMatchesPostedPipelines( } } -func mockOpampAgent(testDBFilePath string) (*opamp.Server, *mockOpAmpConnection, error) { +func mockOpampAgent(testDBFilePath string) (*opamp.Server, *opamp.MockOpAmpConnection, error) { // Mock an available opamp agent testDB, err := opampModel.InitDB(testDBFilePath) if err != nil { @@ -575,8 +574,8 @@ func 
mockOpampAgent(testDBFilePath string) (*opamp.Server, *mockOpAmpConnection, return nil, nil, err } - opampServer := opamp.InitializeServer(constants.OpAmpWsEndpoint, nil) - opampClientConnection := &mockOpAmpConnection{} + opampServer := opamp.InitializeServer(nil, opamp.NewMockAgentConfigProvider()) + opampClientConnection := &opamp.MockOpAmpConnection{} opampServer.OnMessage( opampClientConnection, &protobufs.AgentToServer{ @@ -674,36 +673,3 @@ func NewAuthenticatedTestRequest( req.Header.Add("Authorization", "Bearer "+userJwt.AccessJwt) return req, nil } - -type mockOpAmpConnection struct { - serverToAgentMsgs []*protobufs.ServerToAgent -} - -func (conn *mockOpAmpConnection) Send(ctx context.Context, msg *protobufs.ServerToAgent) error { - conn.serverToAgentMsgs = append(conn.serverToAgentMsgs, msg) - return nil -} - -func (conn *mockOpAmpConnection) latestMsgFromServer() *protobufs.ServerToAgent { - if len(conn.serverToAgentMsgs) < 1 { - return nil - } - return conn.serverToAgentMsgs[len(conn.serverToAgentMsgs)-1] -} - -func (conn *mockOpAmpConnection) LatestPipelinesReceivedFromServer() ([]logparsingpipeline.Pipeline, error) { - pipelines := []logparsingpipeline.Pipeline{} - lastMsg := conn.latestMsgFromServer() - if lastMsg == nil { - return pipelines, nil - } - - return pipelines, nil -} - -func (conn *mockOpAmpConnection) Disconnect() error { - return nil -} -func (conn *mockOpAmpConnection) RemoteAddr() net.Addr { - return nil -} From 546d98ca9c1f10a06736ddb04d7dbb811fe919ab Mon Sep 17 00:00:00 2001 From: Raj Kamal Singh <1133322+rkssisodiya@users.noreply.github.com> Date: Sun, 15 Oct 2023 21:04:19 +0530 Subject: [PATCH 02/23] feat: agentConf.manager as opamp conf provider (#3747) * chore: add integration test for log pipelines being recommended to agents on connection * chore: agentConf.Manager as AgentConfigProvider * feat: logparsingpipelines as an AgentFeature * chore: some cleanup * feat: update agentConf init in ee query-service * chore: more cleanup and move opamp/logspipeline -> logparsingpipeline/collector_config.go * chore: some more cleanup * chore: set agent.RemoteConfig after broadcasting conf to all agents * chore: add test scenario for post remote conf application --- ee/query-service/app/server.go | 17 +- pkg/query-service/agentConf/Readme.md | 5 + pkg/query-service/agentConf/agent_features.go | 25 +++ pkg/query-service/agentConf/manager.go | 167 +++++++++++++++--- .../app/logparsingpipeline/agent_feature.go | 5 + .../collector_config.go} | 131 ++++---------- .../collector_config_test.go} | 2 +- .../app/logparsingpipeline/controller.go | 59 +++++-- .../app/opamp/config_provider_test.go | 29 +-- pkg/query-service/app/opamp/mocks.go | 13 ++ pkg/query-service/app/opamp/model/agents.go | 24 ++- pkg/query-service/app/server.go | 13 +- .../integration/logparsingpipeline_test.go | 158 +++++++++++------ 13 files changed, 423 insertions(+), 225 deletions(-) create mode 100644 pkg/query-service/agentConf/Readme.md create mode 100644 pkg/query-service/agentConf/agent_features.go create mode 100644 pkg/query-service/app/logparsingpipeline/agent_feature.go rename pkg/query-service/app/{opamp/logspipeline.go => logparsingpipeline/collector_config.go} (62%) rename pkg/query-service/app/{opamp/logspipeline_test.go => logparsingpipeline/collector_config_test.go} (99%) diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go index 5dda1e7237..5a1dcd9bd5 100644 --- a/ee/query-service/app/server.go +++ b/ee/query-service/app/server.go @@ -171,13 +171,18 @@ func 
NewServer(serverOptions *ServerOptions) (*Server, error) { return nil, err } - // initiate agent config handler - if err := agentConf.Initiate(localDB, AppDbEngine); err != nil { + // ingestion pipelines manager + logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(localDB, "sqlite") + if err != nil { return nil, err } - // ingestion pipelines manager - logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(localDB, "sqlite") + // initiate agent config handler + agentConfMgr, err := agentConf.Initiate(&agentConf.ManagerOptions{ + DB: localDB, + DBEngine: AppDbEngine, + AgentFeatures: []agentConf.AgentFeature{logParsingPipelineController}, + }) if err != nil { return nil, err } @@ -256,10 +261,8 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { s.privateHTTP = privateServer - // TODO(Raj): Replace this with actual provider in a follow up PR - agentConfigProvider := opamp.NewMockAgentConfigProvider() s.opampServer = opamp.InitializeServer( - &opAmpModel.AllAgents, agentConfigProvider, + &opAmpModel.AllAgents, agentConfMgr, ) return s, nil diff --git a/pkg/query-service/agentConf/Readme.md b/pkg/query-service/agentConf/Readme.md new file mode 100644 index 0000000000..14cdd63375 --- /dev/null +++ b/pkg/query-service/agentConf/Readme.md @@ -0,0 +1,5 @@ +# Versioned config for agent based features. + +Responsibilities +- Maintain versioned config for registered agent based features like log pipelines etc. +- Provide a combined `AgentConfigProvider` for the opamp server to consume when managing agents diff --git a/pkg/query-service/agentConf/agent_features.go b/pkg/query-service/agentConf/agent_features.go new file mode 100644 index 0000000000..f10d4fbf66 --- /dev/null +++ b/pkg/query-service/agentConf/agent_features.go @@ -0,0 +1,25 @@ +package agentConf + +import "go.signoz.io/signoz/pkg/query-service/model" + +// Interface for features implemented via agent config. 
+// Eg: ingestion side signal pre-processing features like log processing pipelines etc +type AgentFeature interface { + // Must be unique across `AgentFeature`s + AgentFeatureType() AgentFeatureType + + // Recommend config for an agent based on its `currentConfYaml` and + // `configVersion` for the feature's settings + RecommendAgentConfig( + currentConfYaml []byte, + configVersion *ConfigVersion, + ) ( + recommendedConfYaml []byte, + + // stored as agent_config_versions.last_config in current agentConf model + // TODO(Raj): maybe refactor agentConf further and clean this up + serializedSettingsUsed string, + + apiErr *model.ApiError, + ) +} diff --git a/pkg/query-service/agentConf/manager.go b/pkg/query-service/agentConf/manager.go index e2a5c2239c..a64c93d3fb 100644 --- a/pkg/query-service/agentConf/manager.go +++ b/pkg/query-service/agentConf/manager.go @@ -3,9 +3,13 @@ package agentConf import ( "context" "fmt" + "strings" + "sync" "sync/atomic" + "github.com/google/uuid" "github.com/jmoiron/sqlx" + "github.com/pkg/errors" "go.signoz.io/signoz/pkg/query-service/app/opamp" filterprocessor "go.signoz.io/signoz/pkg/query-service/app/opamp/otelconfig/filterprocessor" tsp "go.signoz.io/signoz/pkg/query-service/app/opamp/otelconfig/tailsampler" @@ -20,10 +24,142 @@ func init() { m = &Manager{} } +type AgentFeatureType string + type Manager struct { Repo // lock to make sure only one update is sent to remote agents at a time lock uint32 + + // For AgentConfigProvider implementation + agentFeatures []AgentFeature + configSubscribers map[string]func() + configSubscribersLock sync.Mutex +} + +type ManagerOptions struct { + DB *sqlx.DB + DBEngine string + + // When acting as opamp.AgentConfigProvider, agent conf recommendations are + // applied to the base conf in the order the features have been specified here. + AgentFeatures []AgentFeature +} + +func Initiate(options *ManagerOptions) (*Manager, error) { + // featureType must be unqiue across registered AgentFeatures. 
+ agentFeatureByType := map[AgentFeatureType]AgentFeature{} + for _, feature := range options.AgentFeatures { + featureType := feature.AgentFeatureType() + if agentFeatureByType[featureType] != nil { + panic(fmt.Sprintf( + "found multiple agent features with type: %s", featureType, + )) + } + agentFeatureByType[featureType] = feature + } + + m = &Manager{ + Repo: Repo{options.DB}, + agentFeatures: options.AgentFeatures, + configSubscribers: map[string]func(){}, + } + + err := m.initDB(options.DBEngine) + if err != nil { + return nil, errors.Wrap(err, "could not init agentConf db") + } + return m, nil +} + +// Implements opamp.AgentConfigProvider +func (m *Manager) SubscribeToConfigUpdates(callback func()) (unsubscribe func()) { + m.configSubscribersLock.Lock() + defer m.configSubscribersLock.Unlock() + + subscriberId := uuid.NewString() + m.configSubscribers[subscriberId] = callback + + return func() { + delete(m.configSubscribers, subscriberId) + } +} + +func (m *Manager) notifyConfigUpdateSubscribers() { + m.configSubscribersLock.Lock() + defer m.configSubscribersLock.Unlock() + for _, handler := range m.configSubscribers { + handler() + } +} + +// Implements opamp.AgentConfigProvider +func (m *Manager) RecommendAgentConfig(currentConfYaml []byte) ( + recommendedConfYaml []byte, + // Opaque id of the recommended config, used for reporting deployment status updates + configId string, + err error, +) { + recommendation := currentConfYaml + settingVersions := []string{} + + for _, feature := range m.agentFeatures { + featureType := ElementTypeDef(feature.AgentFeatureType()) + latestConfig, apiErr := GetLatestVersion(context.Background(), featureType) + if apiErr != nil && apiErr.Type() != model.ErrorNotFound { + return nil, "", errors.Wrap(apiErr.ToError(), "failed to get latest agent config version") + } + + if latestConfig == nil { + continue + } + + updatedConf, serializedSettingsUsed, apiErr := feature.RecommendAgentConfig( + recommendation, latestConfig, + ) + if apiErr != nil { + return nil, "", errors.Wrap(apiErr.ToError(), fmt.Sprintf( + "failed to generate agent config recommendation for %s", featureType, + )) + } + recommendation = updatedConf + configId := fmt.Sprintf("%s:%d", featureType, latestConfig.Version) + settingVersions = append(settingVersions, configId) + + m.updateDeployStatus( + context.Background(), + featureType, + latestConfig.Version, + string(DeployInitiated), + "Deployment has started", + configId, + serializedSettingsUsed, + ) + + } + + configId = strings.Join(settingVersions, ",") + return recommendation, configId, nil +} + +// Implements opamp.AgentConfigProvider +func (m *Manager) ReportConfigDeploymentStatus( + agentId string, + configId string, + err error, +) { + featureConfigIds := strings.Split(configId, ",") + for _, featureConfId := range featureConfigIds { + newStatus := string(Deployed) + message := "Deployment was successful" + if err != nil { + newStatus = string(DeployFailed) + message = fmt.Sprintf("%s: %s", agentId, err.Error()) + } + m.updateDeployStatusByHash( + context.Background(), featureConfId, newStatus, message, + ) + } } // Ready indicates if Manager can accept new config update requests @@ -34,10 +170,7 @@ func (mgr *Manager) Ready() bool { return opamp.Ready() } -func Initiate(db *sqlx.DB, engine string) error { - m.Repo = Repo{db} - return m.initDB(engine) -} +// Static methods for working with default manager instance in this module. 
// Ready indicates if Manager can accept new config update requests func Ready() bool { @@ -81,6 +214,8 @@ func StartNewVersion( return nil, err } + m.notifyConfigUpdateSubscribers() + return cfg, nil } @@ -219,27 +354,3 @@ func UpsertSamplingProcessor(ctx context.Context, version int, config *tsp.Confi m.updateDeployStatus(ctx, ElementTypeSamplingRules, version, string(DeployInitiated), "Deployment started", configHash, string(processorConfYaml)) return nil } - -// UpsertLogParsingProcessors updates the agent with log parsing processors -func UpsertLogParsingProcessor( - ctx context.Context, - version int, - rawPipelineData []byte, - config map[string]interface{}, - names []string, -) *model.ApiError { - if !atomic.CompareAndSwapUint32(&m.lock, 0, 1) { - return model.UnavailableError(fmt.Errorf("agent updater is busy")) - } - defer atomic.StoreUint32(&m.lock, 0) - - // send the changes to opamp. - configHash, err := opamp.UpsertLogsParsingProcessor(context.Background(), config, names, m.OnConfigUpdate) - if err != nil { - zap.S().Errorf("failed to call agent config update for log parsing processor:", err) - return err - } - - m.updateDeployStatus(ctx, ElementTypeLogPipelines, version, string(DeployInitiated), "Deployment has started", configHash, string(rawPipelineData)) - return nil -} diff --git a/pkg/query-service/app/logparsingpipeline/agent_feature.go b/pkg/query-service/app/logparsingpipeline/agent_feature.go new file mode 100644 index 0000000000..3f8cc7df1a --- /dev/null +++ b/pkg/query-service/app/logparsingpipeline/agent_feature.go @@ -0,0 +1,5 @@ +package logparsingpipeline + +import "go.signoz.io/signoz/pkg/query-service/agentConf" + +const LogPipelinesFeatureType agentConf.AgentFeatureType = "log_pipelines" diff --git a/pkg/query-service/app/opamp/logspipeline.go b/pkg/query-service/app/logparsingpipeline/collector_config.go similarity index 62% rename from pkg/query-service/app/opamp/logspipeline.go rename to pkg/query-service/app/logparsingpipeline/collector_config.go index 9ad81fe77c..dfef6070f9 100644 --- a/pkg/query-service/app/opamp/logspipeline.go +++ b/pkg/query-service/app/logparsingpipeline/collector_config.go @@ -1,16 +1,12 @@ -package opamp +package logparsingpipeline import ( - "context" - "crypto/sha256" "encoding/json" "fmt" "strings" "sync" "github.com/knadh/koanf/parsers/yaml" - "github.com/open-telemetry/opamp-go/protobufs" - model "go.signoz.io/signoz/pkg/query-service/app/opamp/model" "go.signoz.io/signoz/pkg/query-service/constants" coreModel "go.signoz.io/signoz/pkg/query-service/model" "go.uber.org/zap" @@ -18,93 +14,7 @@ import ( var lockLogsPipelineSpec sync.RWMutex -func UpsertLogsParsingProcessor( - ctx context.Context, - parsingProcessors map[string]interface{}, - parsingProcessorsNames []string, - callback func(string, string, error), -) (string, *coreModel.ApiError) { - confHash := "" - if opAmpServer == nil { - return confHash, coreModel.UnavailableError(fmt.Errorf( - "opamp server is down, unable to push config to agent at this moment", - )) - } - - agents := opAmpServer.agents.GetAllAgents() - if len(agents) == 0 { - return confHash, coreModel.UnavailableError(fmt.Errorf( - "no agents available at the moment", - )) - } - - for _, agent := range agents { - config := agent.EffectiveConfig - c, err := yaml.Parser().Unmarshal([]byte(config)) - if err != nil { - return confHash, coreModel.BadRequest(err) - } - - buildLogParsingProcessors(c, parsingProcessors) - - p, err := getOtelPipelinFromConfig(c) - if err != nil { - return confHash, 
coreModel.BadRequest(err) - } - if p.Pipelines.Logs == nil { - return confHash, coreModel.InternalError(fmt.Errorf( - "logs pipeline doesn't exist", - )) - } - - // build the new processor list - updatedProcessorList, _ := buildLogsProcessors(p.Pipelines.Logs.Processors, parsingProcessorsNames) - p.Pipelines.Logs.Processors = updatedProcessorList - - // add the new processor to the data ( no checks required as the keys will exists) - c["service"].(map[string]interface{})["pipelines"].(map[string]interface{})["logs"] = p.Pipelines.Logs - - updatedConf, err := yaml.Parser().Marshal(c) - if err != nil { - return confHash, coreModel.BadRequest(err) - } - - // zap.S().Infof("sending new config", string(updatedConf)) - hash := sha256.New() - _, err = hash.Write(updatedConf) - if err != nil { - return confHash, coreModel.InternalError(err) - } - agent.EffectiveConfig = string(updatedConf) - err = agent.Upsert() - if err != nil { - return confHash, coreModel.InternalError(err) - } - - agent.SendToAgent(&protobufs.ServerToAgent{ - RemoteConfig: &protobufs.AgentRemoteConfig{ - Config: &protobufs.AgentConfigMap{ - ConfigMap: map[string]*protobufs.AgentConfigFile{ - "collector.yaml": { - Body: updatedConf, - ContentType: "application/x-yaml", - }, - }, - }, - ConfigHash: hash.Sum(nil), - }, - }) - - if confHash == "" { - confHash = string(hash.Sum(nil)) - model.ListenToConfigUpdate(agent.ID, confHash, callback) - } - } - - return confHash, nil -} - -// check if the processors already exist +// check if the processors already exis // if yes then update the processor. // if something doesn't exists then remove it. func buildLogParsingProcessors(agentConf, parsingProcessors map[string]interface{}) error { @@ -233,3 +143,40 @@ func checkDuplicateString(pipeline []string) bool { } return false } + +func GenerateCollectorConfigWithPipelines( + config []byte, + parsingProcessors map[string]interface{}, + parsingProcessorsNames []string, +) ([]byte, *coreModel.ApiError) { + c, err := yaml.Parser().Unmarshal([]byte(config)) + if err != nil { + return nil, coreModel.BadRequest(err) + } + + buildLogParsingProcessors(c, parsingProcessors) + + p, err := getOtelPipelinFromConfig(c) + if err != nil { + return nil, coreModel.BadRequest(err) + } + if p.Pipelines.Logs == nil { + return nil, coreModel.InternalError(fmt.Errorf( + "logs pipeline doesn't exist", + )) + } + + // build the new processor list + updatedProcessorList, _ := buildLogsProcessors(p.Pipelines.Logs.Processors, parsingProcessorsNames) + p.Pipelines.Logs.Processors = updatedProcessorList + + // add the new processor to the data ( no checks required as the keys will exists) + c["service"].(map[string]interface{})["pipelines"].(map[string]interface{})["logs"] = p.Pipelines.Logs + + updatedConf, err := yaml.Parser().Marshal(c) + if err != nil { + return nil, coreModel.BadRequest(err) + } + + return updatedConf, nil +} diff --git a/pkg/query-service/app/opamp/logspipeline_test.go b/pkg/query-service/app/logparsingpipeline/collector_config_test.go similarity index 99% rename from pkg/query-service/app/opamp/logspipeline_test.go rename to pkg/query-service/app/logparsingpipeline/collector_config_test.go index eef08870dd..8ef79875d5 100644 --- a/pkg/query-service/app/opamp/logspipeline_test.go +++ b/pkg/query-service/app/logparsingpipeline/collector_config_test.go @@ -1,4 +1,4 @@ -package opamp +package logparsingpipeline import ( "fmt" diff --git a/pkg/query-service/app/logparsingpipeline/controller.go b/pkg/query-service/app/logparsingpipeline/controller.go 
index fc10047c36..72b6c6b76e 100644 --- a/pkg/query-service/app/logparsingpipeline/controller.go +++ b/pkg/query-service/app/logparsingpipeline/controller.go @@ -10,6 +10,7 @@ import ( "go.signoz.io/signoz/pkg/query-service/agentConf" "go.signoz.io/signoz/pkg/query-service/auth" "go.signoz.io/signoz/pkg/query-service/model" + "go.uber.org/multierr" "go.uber.org/zap" ) @@ -72,15 +73,6 @@ func (ic *LogParsingPipelineController) ApplyPipelines( } - // prepare filter config (processor) from the pipelines - filterConfig, names, translationErr := PreparePipelineProcessor(pipelines) - if translationErr != nil { - zap.S().Errorf("failed to generate processor config from pipelines for deployment %w", translationErr) - return nil, model.BadRequest(errors.Wrap( - translationErr, "failed to generate processor config from pipelines for deployment", - )) - } - if !agentConf.Ready() { return nil, model.UnavailableError(fmt.Errorf( "agent updater unavailable at the moment. Please try in sometime", @@ -99,12 +91,6 @@ func (ic *LogParsingPipelineController) ApplyPipelines( return nil, err } - zap.S().Info("applying drop pipeline config", cfg) - // raw pipeline is needed since filterConfig doesn't contain inactive pipelines and operators - rawPipelineData, _ := json.Marshal(pipelines) - - // queue up the config to push to opamp - err = agentConf.UpsertLogParsingProcessor(ctx, cfg.Version, rawPipelineData, filterConfig, names) history, _ := agentConf.GetConfigHistory(ctx, agentConf.ElementTypeLogPipelines, 10) insertedCfg, _ := agentConf.GetConfigVersion(ctx, agentConf.ElementTypeLogPipelines, cfg.Version) @@ -166,3 +152,46 @@ func (ic *LogParsingPipelineController) PreviewLogsPipelines( OutputLogs: result, }, nil } + +// Implements agentConf.AgentFeature interface. +func (pc *LogParsingPipelineController) AgentFeatureType() agentConf.AgentFeatureType { + return LogPipelinesFeatureType +} + +// Implements agentConf.AgentFeature interface. 
+func (pc *LogParsingPipelineController) RecommendAgentConfig( + currentConfYaml []byte, + configVersion *agentConf.ConfigVersion, +) ( + recommendedConfYaml []byte, + serializedSettingsUsed string, + apiErr *model.ApiError, +) { + + pipelines, errs := pc.getPipelinesByVersion( + context.Background(), configVersion.Version, + ) + if len(errs) > 0 { + return nil, "", model.InternalError(multierr.Combine(errs...)) + } + + processors, procNames, err := PreparePipelineProcessor(pipelines) + if err != nil { + return nil, "", model.BadRequest(errors.Wrap(err, "could not prepare otel collector processors for log pipelines")) + } + + updatedConf, apiErr := GenerateCollectorConfigWithPipelines( + currentConfYaml, processors, procNames, + ) + if apiErr != nil { + return nil, "", model.WrapApiError(apiErr, "could not marshal yaml for updated conf") + } + + rawPipelineData, err := json.Marshal(pipelines) + if err != nil { + return nil, "", model.BadRequest(errors.Wrap(err, "could not serialize pipelines to JSON")) + } + + return updatedConf, string(rawPipelineData), nil + +} diff --git a/pkg/query-service/app/opamp/config_provider_test.go b/pkg/query-service/app/opamp/config_provider_test.go index 083c396a41..6718ff1581 100644 --- a/pkg/query-service/app/opamp/config_provider_test.go +++ b/pkg/query-service/app/opamp/config_provider_test.go @@ -2,8 +2,6 @@ package opamp import ( "fmt" - "log" - "net" "os" "testing" @@ -137,6 +135,21 @@ func TestOpAMPServerToAgentCommunicationWithConfigProvider(t *testing.T) { ) require.False(tb.testConfigProvider.ReportedDeploymentStatuses[expectedConfId][agent2Id]) + lastAgent1Msg = agent1Conn.LatestMsgFromServer() + agent1Conn.ClearMsgsFromServer() + response := tb.opampServer.OnMessage(agent1Conn, &protobufs.AgentToServer{ + InstanceUid: agent1Id, + RemoteConfigStatus: &protobufs.RemoteConfigStatus{ + Status: protobufs.RemoteConfigStatuses_RemoteConfigStatuses_APPLIED, + LastRemoteConfigHash: lastAgent1Msg.RemoteConfig.ConfigHash, + }, + }) + require.Nil(response.RemoteConfig) + require.Nil( + agent1Conn.LatestMsgFromServer(), + "server should not recommend a config if agent is reporting back with status on a broadcasted config", + ) + require.Equal(1, len(tb.testConfigProvider.ConfigUpdateSubscribers)) tb.opampServer.Stop() require.Equal( @@ -242,15 +255,3 @@ func initialAgentConf() *protobufs.AgentConfigMap { `), ) } - -// Brought in from https://github.com/open-telemetry/opamp-go/blob/main/internal/testhelpers/nethelpers.go -func GetAvailableLocalAddress() string { - ln, err := net.Listen("tcp", "127.0.0.1:") - if err != nil { - log.Fatalf("failed to get a free local port: %v", err) - } - // There is a possible race if something else takes this same port before - // the test uses it, however, that is unlikely in practice. 
- defer ln.Close() - return ln.Addr().String() -} diff --git a/pkg/query-service/app/opamp/mocks.go b/pkg/query-service/app/opamp/mocks.go index 705fe38bcf..12e9410989 100644 --- a/pkg/query-service/app/opamp/mocks.go +++ b/pkg/query-service/app/opamp/mocks.go @@ -2,6 +2,7 @@ package opamp import ( "context" + "log" "net" "github.com/google/uuid" @@ -132,3 +133,15 @@ func (ta *MockAgentConfigProvider) NotifySubscribersOfChange() { callback() } } + +// Brought in from https://github.com/open-telemetry/opamp-go/blob/main/internal/testhelpers/nethelpers.go +func GetAvailableLocalAddress() string { + ln, err := net.Listen("tcp", "127.0.0.1:") + if err != nil { + log.Fatalf("failed to get a free local port: %v", err) + } + // There is a possible race if something else takes this same port before + // the test uses it, however, that is unlikely in practice. + defer ln.Close() + return ln.Addr().String() +} diff --git a/pkg/query-service/app/opamp/model/agents.go b/pkg/query-service/app/opamp/model/agents.go index e835ee8ccc..50a554b957 100644 --- a/pkg/query-service/app/opamp/model/agents.go +++ b/pkg/query-service/app/opamp/model/agents.go @@ -142,18 +142,24 @@ func (agents *Agents) RecommendLatestConfigToAll( return nil } - agent.SendToAgent(&protobufs.ServerToAgent{ - RemoteConfig: &protobufs.AgentRemoteConfig{ - Config: &protobufs.AgentConfigMap{ - ConfigMap: map[string]*protobufs.AgentConfigFile{ - CollectorConfigFilename: { - Body: newConfig, - ContentType: "application/x-yaml", - }, + newRemoteConfig := &protobufs.AgentRemoteConfig{ + Config: &protobufs.AgentConfigMap{ + ConfigMap: map[string]*protobufs.AgentConfigFile{ + CollectorConfigFilename: { + Body: newConfig, + ContentType: "application/x-yaml", }, }, - ConfigHash: []byte(confId), }, + ConfigHash: []byte(confId), + } + + agent.mux.Lock() + defer agent.mux.Unlock() + agent.remoteConfig = newRemoteConfig + + agent.SendToAgent(&protobufs.ServerToAgent{ + RemoteConfig: newRemoteConfig, }) ListenToConfigUpdate(agent.ID, confId, provider.ReportConfigDeploymentStatus) diff --git a/pkg/query-service/app/server.go b/pkg/query-service/app/server.go index 6e3f267491..08fb4e7850 100644 --- a/pkg/query-service/app/server.go +++ b/pkg/query-service/app/server.go @@ -203,14 +203,19 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { return nil, err } - if err := agentConf.Initiate(localDB, "sqlite"); err != nil { + agentConfMgr, err := agentConf.Initiate(&agentConf.ManagerOptions{ + DB: localDB, + DBEngine: "sqlite", + AgentFeatures: []agentConf.AgentFeature{ + logParsingPipelineController, + }, + }) + if err != nil { return nil, err } - // TODO(Raj): Replace this with actual provider in a follow up PR - agentConfigProvider := opamp.NewMockAgentConfigProvider() s.opampServer = opamp.InitializeServer( - &opAmpModel.AllAgents, agentConfigProvider, + &opAmpModel.AllAgents, agentConfMgr, ) return s, nil diff --git a/pkg/query-service/tests/integration/logparsingpipeline_test.go b/pkg/query-service/tests/integration/logparsingpipeline_test.go index 0a1105e63c..2e1b70c1db 100644 --- a/pkg/query-service/tests/integration/logparsingpipeline_test.go +++ b/pkg/query-service/tests/integration/logparsingpipeline_test.go @@ -12,6 +12,7 @@ import ( "strings" "testing" + "github.com/google/uuid" "github.com/gorilla/mux" "github.com/jmoiron/sqlx" "github.com/knadh/koanf/parsers/yaml" @@ -108,6 +109,7 @@ func TestLogPipelinesLifecycle(t *testing.T) { t, postablePipelines, createPipelinesResp, ) 
testbed.assertPipelinesSentToOpampClient(createPipelinesResp.Pipelines) + testbed.assertNewAgentGetsPipelinesOnConnection(createPipelinesResp.Pipelines) // Should be able to get the configured pipelines. getPipelinesResp = testbed.GetPipelinesFromQS() @@ -133,7 +135,8 @@ func TestLogPipelinesLifecycle(t *testing.T) { t, postablePipelines, getPipelinesResp, ) assert.Equal( - getPipelinesResp.History[0].DeployStatus, agentConf.Deployed, + agentConf.Deployed, + getPipelinesResp.History[0].DeployStatus, "pipeline deployment should be complete after acknowledgment from opamp client", ) @@ -144,6 +147,7 @@ func TestLogPipelinesLifecycle(t *testing.T) { t, postablePipelines, updatePipelinesResp, ) testbed.assertPipelinesSentToOpampClient(updatePipelinesResp.Pipelines) + testbed.assertNewAgentGetsPipelinesOnConnection(updatePipelinesResp.Pipelines) assert.Equal( 2, len(updatePipelinesResp.History), @@ -162,7 +166,8 @@ func TestLogPipelinesLifecycle(t *testing.T) { t, postablePipelines, getPipelinesResp, ) assert.Equal( - getPipelinesResp.History[0].DeployStatus, agentConf.Deployed, + agentConf.Deployed, + getPipelinesResp.History[0].DeployStatus, "deployment for latest pipeline config should be complete after acknowledgment from opamp client", ) } @@ -332,10 +337,7 @@ func NewLogPipelinesTestBed(t *testing.T) *LogPipelinesTestBed { t.Fatalf("could not create a new ApiHandler: %v", err) } - opampServer, clientConn, err := mockOpampAgent(testDBFilePath) - if err != nil { - t.Fatalf("could not create opamp server and mock client connection: %v", err) - } + opampServer, clientConn := mockOpampAgent(t, testDBFilePath, controller) user, apiErr := createTestUser() if apiErr != nil { @@ -447,16 +449,26 @@ func (tb *LogPipelinesTestBed) assertPipelinesSentToOpampClient( pipelines []logparsingpipeline.Pipeline, ) { lastMsg := tb.opampClientConn.LatestMsgFromServer() - collectorConfigFiles := lastMsg.RemoteConfig.Config.ConfigMap + assertPipelinesRecommendedInRemoteConfig( + tb.t, lastMsg, pipelines, + ) +} + +func assertPipelinesRecommendedInRemoteConfig( + t *testing.T, + msg *protobufs.ServerToAgent, + pipelines []logparsingpipeline.Pipeline, +) { + collectorConfigFiles := msg.RemoteConfig.Config.ConfigMap assert.Equal( - tb.t, len(collectorConfigFiles), 1, + t, len(collectorConfigFiles), 1, "otel config sent to client is expected to contain atleast 1 file", ) collectorConfigYaml := maps.Values(collectorConfigFiles)[0].Body collectorConfSentToClient, err := yaml.Parser().Unmarshal(collectorConfigYaml) if err != nil { - tb.t.Fatalf("could not unmarshal config file sent to opamp client: %v", err) + t.Fatalf("could not unmarshal config file sent to opamp client: %v", err) } // Each pipeline is expected to become its own processor @@ -477,14 +489,14 @@ func (tb *LogPipelinesTestBed) assertPipelinesSentToOpampClient( _, expectedLogProcessorNames, err := logparsingpipeline.PreparePipelineProcessor(pipelines) assert.Equal( - tb.t, expectedLogProcessorNames, collectorConfLogsPipelineProcNames, + t, expectedLogProcessorNames, collectorConfLogsPipelineProcNames, "config sent to opamp client doesn't contain expected log pipelines", ) collectorConfProcessors := collectorConfSentToClient["processors"].(map[string]interface{}) for _, procName := range expectedLogProcessorNames { pipelineProcessorInConf, procExists := collectorConfProcessors[procName] - assert.True(tb.t, procExists, fmt.Sprintf( + assert.True(t, procExists, fmt.Sprintf( "%s processor not found in config sent to opamp client", procName, )) @@ -497,7 
+509,7 @@ func (tb *LogPipelinesTestBed) assertPipelinesSentToOpampClient( pipelineProcOps, func(op interface{}) bool { return op.(map[string]interface{})["id"] == "router_signoz" }, ) - require.GreaterOrEqual(tb.t, routerOpIdx, 0) + require.GreaterOrEqual(t, routerOpIdx, 0) routerOproutes := pipelineProcOps[routerOpIdx].(map[string]interface{})["routes"].([]interface{}) pipelineFilterExpr := routerOproutes[0].(map[string]interface{})["expr"].(string) @@ -507,10 +519,10 @@ func (tb *LogPipelinesTestBed) assertPipelinesSentToOpampClient( return logparsingpipeline.CollectorConfProcessorName(p) == procName }, ) - require.GreaterOrEqual(tb.t, pipelineIdx, 0) + require.GreaterOrEqual(t, pipelineIdx, 0) expectedExpr, err := queryBuilderToExpr.Parse(pipelines[pipelineIdx].Filter) - require.Nil(tb.t, err) - require.Equal(tb.t, expectedExpr, pipelineFilterExpr) + require.Nil(t, err) + require.Equal(t, expectedExpr, pipelineFilterExpr) } } @@ -528,6 +540,26 @@ func (tb *LogPipelinesTestBed) simulateOpampClientAcknowledgementForLatestConfig }) } +func (tb *LogPipelinesTestBed) assertNewAgentGetsPipelinesOnConnection( + pipelines []logparsingpipeline.Pipeline, +) { + newAgentConn := &opamp.MockOpAmpConnection{} + tb.opampServer.OnMessage( + newAgentConn, + &protobufs.AgentToServer{ + InstanceUid: uuid.NewString(), + EffectiveConfig: &protobufs.EffectiveConfig{ + ConfigMap: newInitialAgentConfigMap(), + }, + }, + ) + latestMsgFromServer := newAgentConn.LatestMsgFromServer() + require.NotNil(tb.t, latestMsgFromServer) + assertPipelinesRecommendedInRemoteConfig( + tb.t, latestMsgFromServer, pipelines, + ) +} + func unmarshalPipelinesResponse(apiResponse *app.ApiResponse) ( *logparsingpipeline.PipelinesResponse, error, @@ -563,58 +595,74 @@ func assertPipelinesResponseMatchesPostedPipelines( } } -func mockOpampAgent(testDBFilePath string) (*opamp.Server, *opamp.MockOpAmpConnection, error) { +func mockOpampAgent( + t *testing.T, + testDBFilePath string, + pipelinesController *logparsingpipeline.LogParsingPipelineController, +) (*opamp.Server, *opamp.MockOpAmpConnection) { // Mock an available opamp agent testDB, err := opampModel.InitDB(testDBFilePath) - if err != nil { - return nil, nil, err - } - err = agentConf.Initiate(testDB, "sqlite") - if err != nil { - return nil, nil, err - } + require.Nil(t, err, "failed to init opamp model") + + agentConfMgr, err := agentConf.Initiate(&agentConf.ManagerOptions{ + DB: testDB, + DBEngine: "sqlite", + AgentFeatures: []agentConf.AgentFeature{pipelinesController}, + }) + require.Nil(t, err, "failed to init agentConf") + + opampServer := opamp.InitializeServer(nil, agentConfMgr) + err = opampServer.Start(opamp.GetAvailableLocalAddress()) + require.Nil(t, err, "failed to start opamp server") + + t.Cleanup(func() { + opampServer.Stop() + }) - opampServer := opamp.InitializeServer(nil, opamp.NewMockAgentConfigProvider()) opampClientConnection := &opamp.MockOpAmpConnection{} opampServer.OnMessage( opampClientConnection, &protobufs.AgentToServer{ InstanceUid: "test", EffectiveConfig: &protobufs.EffectiveConfig{ - ConfigMap: &protobufs.AgentConfigMap{ - ConfigMap: map[string]*protobufs.AgentConfigFile{ - "otel-collector.yaml": { - Body: []byte(` - receivers: - otlp: - protocols: - grpc: - endpoint: 0.0.0.0:4317 - http: - endpoint: 0.0.0.0:4318 - processors: - batch: - send_batch_size: 10000 - send_batch_max_size: 11000 - timeout: 10s - exporters: - otlp: - endpoint: otelcol2:4317 - service: - pipelines: - logs: - receivers: [otlp] - processors: [batch] - exporters: [otlp] - 
`), - ContentType: "text/yaml", - }, - }, - }, + ConfigMap: newInitialAgentConfigMap(), }, }, ) - return opampServer, opampClientConnection, nil + return opampServer, opampClientConnection +} + +func newInitialAgentConfigMap() *protobufs.AgentConfigMap { + return &protobufs.AgentConfigMap{ + ConfigMap: map[string]*protobufs.AgentConfigFile{ + "otel-collector.yaml": { + Body: []byte(` + receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + processors: + batch: + send_batch_size: 10000 + send_batch_max_size: 11000 + timeout: 10s + exporters: + otlp: + endpoint: otelcol2:4317 + service: + pipelines: + logs: + receivers: [otlp] + processors: [batch] + exporters: [otlp] + `), + ContentType: "text/yaml", + }, + }, + } } func createTestUser() (*model.User, *model.ApiError) { From 3620cdb5d25f13861832a313cd37516a6083fe98 Mon Sep 17 00:00:00 2001 From: Raj Kamal Singh <1133322+rkssisodiya@users.noreply.github.com> Date: Mon, 16 Oct 2023 08:40:00 +0530 Subject: [PATCH 03/23] Feat: opamp managed otel collector (#3746) * feat: add opamp config for running managed collector * feat: docker compose config for managed otel collector --- deploy/docker/clickhouse-setup/docker-compose.yaml | 2 ++ deploy/docker/clickhouse-setup/otel-collector-opamp-config.yaml | 1 + 2 files changed, 3 insertions(+) create mode 100644 deploy/docker/clickhouse-setup/otel-collector-opamp-config.yaml diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml index 9b06be11bc..163179ffb9 100644 --- a/deploy/docker/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker/clickhouse-setup/docker-compose.yaml @@ -218,11 +218,13 @@ services: command: [ "--config=/etc/otel-collector-config.yaml", + "--manager-config=/etc/manager-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName" ] user: root # required for reading docker container logs volumes: - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml + - ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml - /var/lib/docker/containers:/var/lib/docker/containers:ro environment: - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux diff --git a/deploy/docker/clickhouse-setup/otel-collector-opamp-config.yaml b/deploy/docker/clickhouse-setup/otel-collector-opamp-config.yaml new file mode 100644 index 0000000000..e408b55ef6 --- /dev/null +++ b/deploy/docker/clickhouse-setup/otel-collector-opamp-config.yaml @@ -0,0 +1 @@ +server_endpoint: ws://query-service:4320/v1/opamp From 5b858f2963cf74ae81079c78bfedc24fbe245af5 Mon Sep 17 00:00:00 2001 From: Yunus M Date: Mon, 16 Oct 2023 23:57:44 +0530 Subject: [PATCH 04/23] Billing UI (#3711) * feat: integrate billing api and wire up billing ui * feat: show billing to admin only if on plans other than basic plan * feat: show billing to admin only if on plans other than basic plan * feat: update notfound snapshot * chore: fix billing sidenav logic * chore: fix several bugs * chore: backend fix for billing * fix: window.open pop blocker issue and error ui (#3750) --------- Co-authored-by: Srikanth Chekuri Co-authored-by: Rajat Dabade --- ee/query-service/app/api/license.go | 15 +- ee/query-service/model/license.go | 12 +- frontend/public/Images/notFound404.png | Bin 0 -> 43988 bytes frontend/public/locales/en-GB/titles.json | 78 ++-- frontend/public/locales/en/titles.json | 78 ++-- frontend/src/AppRoutes/Private.tsx | 130 ++++-- frontend/src/AppRoutes/index.tsx | 17 +- 
frontend/src/AppRoutes/pageComponents.ts | 9 + frontend/src/AppRoutes/routes.ts | 17 + frontend/src/api/billing/checkout.ts | 31 ++ frontend/src/api/billing/getUsage.ts | 35 ++ frontend/src/api/dashboard/create.ts | 4 +- frontend/src/api/licenses/getAll.ts | 2 +- frontend/src/assets/NotFound.tsx | 266 +---------- .../__snapshots__/NotFound.test.tsx.snap | 271 +---------- frontend/src/constants/reactQueryKeys.ts | 1 + frontend/src/constants/routes.ts | 2 + frontend/src/container/AppLayout/index.tsx | 3 +- .../BillingContainer.styles.scss | 36 ++ .../BillingContainer/BillingContainer.tsx | 432 ++++++++++++++++++ .../src/container/Header/Header.styles.scss | 12 + .../container/Header/ManageLicense/index.tsx | 2 +- frontend/src/container/Header/index.tsx | 144 ++++-- .../src/container/Licenses/ListLicenses.tsx | 3 +- frontend/src/container/Licenses/index.tsx | 2 +- frontend/src/container/SideNav/SideNav.tsx | 16 +- frontend/src/container/SideNav/config.ts | 1 + frontend/src/container/SideNav/menuItems.tsx | 6 + .../container/TopNav/Breadcrumbs/index.tsx | 4 +- .../TopNav/DateTimeSelection/config.ts | 2 + frontend/src/hooks/useLicense/constant.ts | 1 + frontend/src/hooks/useUsage/useUsage.tsx | 25 + .../src/pages/Billing/BillingPage.styles.scss | 5 + frontend/src/pages/Billing/BillingPage.tsx | 13 + frontend/src/pages/Billing/index.tsx | 3 + .../WorkspaceLocked.styles.scss | 19 + .../pages/WorkspaceLocked/WorkspaceLocked.tsx | 97 ++++ frontend/src/pages/WorkspaceLocked/index.tsx | 3 + frontend/src/types/api/billing/checkout.ts | 9 + frontend/src/types/api/licenses/getAll.ts | 10 +- frontend/src/utils/permission/index.ts | 4 +- 41 files changed, 1104 insertions(+), 716 deletions(-) create mode 100644 frontend/public/Images/notFound404.png create mode 100644 frontend/src/api/billing/checkout.ts create mode 100644 frontend/src/api/billing/getUsage.ts create mode 100644 frontend/src/container/BillingContainer/BillingContainer.styles.scss create mode 100644 frontend/src/container/BillingContainer/BillingContainer.tsx create mode 100644 frontend/src/container/Header/Header.styles.scss create mode 100644 frontend/src/hooks/useUsage/useUsage.tsx create mode 100644 frontend/src/pages/Billing/BillingPage.styles.scss create mode 100644 frontend/src/pages/Billing/BillingPage.tsx create mode 100644 frontend/src/pages/Billing/index.tsx create mode 100644 frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss create mode 100644 frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx create mode 100644 frontend/src/pages/WorkspaceLocked/index.tsx create mode 100644 frontend/src/types/api/billing/checkout.ts diff --git a/ee/query-service/app/api/license.go b/ee/query-service/app/api/license.go index a24ba122d2..c125fd10d1 100644 --- a/ee/query-service/app/api/license.go +++ b/ee/query-service/app/api/license.go @@ -30,6 +30,7 @@ type details struct { Total float64 `json:"total"` Breakdown []usageResponse `json:"breakdown"` BaseFee float64 `json:"baseFee"` + BillTotal float64 `json:"billTotal"` } type billingDetails struct { @@ -147,11 +148,13 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) { } resp := model.Licenses{ - TrialStart: -1, - TrialEnd: -1, - OnTrial: false, - WorkSpaceBlock: false, - Licenses: licenses, + TrialStart: -1, + TrialEnd: -1, + OnTrial: false, + WorkSpaceBlock: false, + TrialConvertedToSubscription: false, + GracePeriodEnd: -1, + Licenses: licenses, } var currentActiveLicenseKey string @@ -216,6 +219,8 @@ func (ah *APIHandler) listLicensesV2(w 
http.ResponseWriter, r *http.Request) { resp.TrialEnd = trialRespData.Data.TrialEnd resp.OnTrial = trialRespData.Data.OnTrial resp.WorkSpaceBlock = trialRespData.Data.WorkSpaceBlock + resp.TrialConvertedToSubscription = trialRespData.Data.TrialConvertedToSubscription + resp.GracePeriodEnd = trialRespData.Data.GracePeriodEnd ah.Respond(w, resp) } diff --git a/ee/query-service/model/license.go b/ee/query-service/model/license.go index 3ba89cf456..7ad349c9b7 100644 --- a/ee/query-service/model/license.go +++ b/ee/query-service/model/license.go @@ -91,11 +91,13 @@ func (l *License) ParseFeatures() { } type Licenses struct { - TrialStart int64 `json:"trialStart"` - TrialEnd int64 `json:"trialEnd"` - OnTrial bool `json:"onTrial"` - WorkSpaceBlock bool `json:"workSpaceBlock"` - Licenses []License `json:"licenses"` + TrialStart int64 `json:"trialStart"` + TrialEnd int64 `json:"trialEnd"` + OnTrial bool `json:"onTrial"` + WorkSpaceBlock bool `json:"workSpaceBlock"` + TrialConvertedToSubscription bool `json:"trialConvertedToSubscription"` + GracePeriodEnd int64 `json:"gracePeriodEnd"` + Licenses []License `json:"licenses"` } type SubscriptionServerResp struct { diff --git a/frontend/public/Images/notFound404.png b/frontend/public/Images/notFound404.png new file mode 100644 index 0000000000000000000000000000000000000000..f80372413856425983d078ae25e1aac63272e8ad GIT binary patch literal 43988 [binary image data omitted]
zOAn#d3leR$Ej&~X4*-|koA_OL*Cdx;iL?+Irc(8=-6UMi)BZ^SD3D$>hk+DV9hHE7 z{V=U6heoKMs~_nEJvCv9v}>3DOxN2fU;knJVcC9y5F*i~TWoU-cl^lA#XM@Eq^}}( zOZXcVD~^jk(81<9w)$CB7N^-4EiyIy3caKjI@xvy>#EJ}uswE|Wh6VopolL_>8kMC6XM(B75QxArhCPo=7yb7gM zWu4HN7_^;>Gu_(Av^@f}-@e*1oC+b(=OTgf1f?@T>cs7?7ml~i$<=rZA03zNml%&? zNXO&UU7wjJ2k_izQ`ADu${%X)AP~ucv!_m6GL4hU6DY__!|;I#tA|J*7e{+(i!xTj zF&aBh?b#Kz68{kBljLwi)sFKO8l@Ja-KGbC*zvuZ%=e$v#z>lY$>yPn?a+YR{L<%CWTT;m+PHOu{$;tLZz~x#*h6g_& zXU5J#w9ea*7J-B;3uaTFBIbpP#rpg0nxNQztI)D)O6l!Et9U`~cj$mO7mLYX0MbGW zXOjRsP$~xT6VLW9wFBrQ>aJUZ7wD#{Sjy~F zGuLnk>~`*U)DA3Mcl48jKKNjOE^N4VU;_e#5(CFPD^W80;(Eky-r!h)a`1rA(*;eR5AGrFX`_JYPBlED%59EWTM=%uwCm&HJ@h+FV$&87$k z1us8UaZ0Xu&Yo-NfT?ut@f|d-vUWCy?V!jE=LWuue(;c zfy|0hvH_<;Y%3E&J>Nmu7oJ?lktOyz^Bj>aQrgKwDIZ+U)noP>jc@UxF z<~KibbFh9N50V7}a1NXquT%GHcSs69b5LVEG@*S`ng#H}SB&I{` z=RLi*e((<-3>?_-6kD9H{IDL**c}e`zh50By3}&t8sp#p9u{X|Qj>s-UnV+#5z6Nt zlih6%0=+H|)~@{Y>fO0!8?Zh6H>}{Su~%~xN(A=0xA=_-4DiU_fArUr)Z@RN zKH5`Rme=!@5ulqn|D-ac{NGQw%8*k#j)Hl(l?jhQauMBS*?XnJ*|LC3TE%K;4dFc2 zo_K7V8aM7*em2YgKz!fi@_Th|+?@iB3;1Xf2npYG)p>3hknih zT%KJnEt#3Ui<5YW&BJQVZ88V0(l|s<2T$`*mGsytz*9|B8M1U%lrgGu^Q|5bQ3gbm zB%ijRL-Vq*rKN7QADgfVM!)&*0;!+e*ubb=*>|$jY%jnYQVHknNkv{$K-HUD zeyp0KgV>KX?ze*pcW~>)$t7TIx>i_y}ea5O8u2M2!o90AUco9s+cT!%08@ zqHC`~5{wf59~Jo1|3>v6*8jlkZ&ZIN@RweHllWh##1pgsQ7duA|2c<$llOno`e&`g tfBt8x|3Lf~rhk+8cO3o;)j81*Xwka1*9r2zkr41Zd)n?4=@+j%{{m2i_jmvR literal 0 HcmV?d00001 diff --git a/frontend/public/locales/en-GB/titles.json b/frontend/public/locales/en-GB/titles.json index d61817e520..f5641d96b6 100644 --- a/frontend/public/locales/en-GB/titles.json +++ b/frontend/public/locales/en-GB/titles.json @@ -1,38 +1,40 @@ -{ - "SIGN_UP": "SigNoz | Sign Up", - "LOGIN": "SigNoz | Login", - "GET_STARTED": "SigNoz | Get Started", - "SERVICE_METRICS": "SigNoz | Service Metrics", - "SERVICE_MAP": "SigNoz | Service Map", - "TRACE": "SigNoz | Trace", - "TRACE_DETAIL": "SigNoz | Trace Detail", - "TRACES_EXPLORER": "SigNoz | Traces Explorer", - "SETTINGS": "SigNoz | Settings", - "USAGE_EXPLORER": "SigNoz | Usage Explorer", - "APPLICATION": "SigNoz | Home", - "ALL_DASHBOARD": "SigNoz | All Dashboards", - "DASHBOARD": "SigNoz | Dashboard", - "DASHBOARD_WIDGET": "SigNoz | Dashboard Widget", - "EDIT_ALERTS": "SigNoz | Edit Alerts", - "LIST_ALL_ALERT": "SigNoz | All Alerts", - "ALERTS_NEW": "SigNoz | New Alert", - "ALL_CHANNELS": "SigNoz | All Channels", - "CHANNELS_NEW": "SigNoz | New Channel", - "CHANNELS_EDIT": "SigNoz | Edit Channel", - "ALL_ERROR": "SigNoz | All Errors", - "ERROR_DETAIL": "SigNoz | Error Detail", - "VERSION": "SigNoz | Version", - "MY_SETTINGS": "SigNoz | My Settings", - "ORG_SETTINGS": "SigNoz | Organization Settings", - "INGESTION_SETTINGS": "SigNoz | Ingestion Settings", - "SOMETHING_WENT_WRONG": "SigNoz | Something Went Wrong", - "UN_AUTHORIZED": "SigNoz | Unauthorized", - "NOT_FOUND": "SigNoz | Page Not Found", - "LOGS": "SigNoz | Logs", - "LOGS_EXPLORER": "SigNoz | Logs Explorer", - "LIVE_LOGS": "SigNoz | Live Logs", - "HOME_PAGE": "Open source Observability Platform | SigNoz", - "PASSWORD_RESET": "SigNoz | Password Reset", - "LIST_LICENSES": "SigNoz | List of Licenses", - "DEFAULT": "Open source Observability Platform | SigNoz" -} +{ + "SIGN_UP": "SigNoz | Sign Up", + "LOGIN": "SigNoz | Login", + "GET_STARTED": "SigNoz | Get Started", + "SERVICE_METRICS": "SigNoz | Service Metrics", + "SERVICE_MAP": "SigNoz | Service Map", + "TRACE": "SigNoz | Trace", + "TRACE_DETAIL": "SigNoz | Trace Detail", + "TRACES_EXPLORER": "SigNoz | Traces Explorer", + "SETTINGS": "SigNoz | 
Settings", + "USAGE_EXPLORER": "SigNoz | Usage Explorer", + "APPLICATION": "SigNoz | Home", + "BILLING": "SigNoz | Billing", + "ALL_DASHBOARD": "SigNoz | All Dashboards", + "DASHBOARD": "SigNoz | Dashboard", + "DASHBOARD_WIDGET": "SigNoz | Dashboard Widget", + "EDIT_ALERTS": "SigNoz | Edit Alerts", + "LIST_ALL_ALERT": "SigNoz | All Alerts", + "ALERTS_NEW": "SigNoz | New Alert", + "ALL_CHANNELS": "SigNoz | All Channels", + "CHANNELS_NEW": "SigNoz | New Channel", + "CHANNELS_EDIT": "SigNoz | Edit Channel", + "ALL_ERROR": "SigNoz | All Errors", + "ERROR_DETAIL": "SigNoz | Error Detail", + "VERSION": "SigNoz | Version", + "MY_SETTINGS": "SigNoz | My Settings", + "ORG_SETTINGS": "SigNoz | Organization Settings", + "INGESTION_SETTINGS": "SigNoz | Ingestion Settings", + "SOMETHING_WENT_WRONG": "SigNoz | Something Went Wrong", + "UN_AUTHORIZED": "SigNoz | Unauthorized", + "NOT_FOUND": "SigNoz | Page Not Found", + "LOGS": "SigNoz | Logs", + "LOGS_EXPLORER": "SigNoz | Logs Explorer", + "LIVE_LOGS": "SigNoz | Live Logs", + "HOME_PAGE": "Open source Observability Platform | SigNoz", + "PASSWORD_RESET": "SigNoz | Password Reset", + "LIST_LICENSES": "SigNoz | List of Licenses", + "WORKSPACE_LOCKED": "SigNoz | Workspace Locked", + "DEFAULT": "Open source Observability Platform | SigNoz" +} diff --git a/frontend/public/locales/en/titles.json b/frontend/public/locales/en/titles.json index 26e2141d38..75c8d73bcf 100644 --- a/frontend/public/locales/en/titles.json +++ b/frontend/public/locales/en/titles.json @@ -1,38 +1,40 @@ -{ - "SIGN_UP": "SigNoz | Sign Up", - "LOGIN": "SigNoz | Login", - "SERVICE_METRICS": "SigNoz | Service Metrics", - "SERVICE_MAP": "SigNoz | Service Map", - "GET_STARTED": "SigNoz | Get Started", - "TRACE": "SigNoz | Trace", - "TRACE_DETAIL": "SigNoz | Trace Detail", - "TRACES_EXPLORER": "SigNoz | Traces Explorer", - "SETTINGS": "SigNoz | Settings", - "USAGE_EXPLORER": "SigNoz | Usage Explorer", - "APPLICATION": "SigNoz | Home", - "ALL_DASHBOARD": "SigNoz | All Dashboards", - "DASHBOARD": "SigNoz | Dashboard", - "DASHBOARD_WIDGET": "SigNoz | Dashboard Widget", - "EDIT_ALERTS": "SigNoz | Edit Alerts", - "LIST_ALL_ALERT": "SigNoz | All Alerts", - "ALERTS_NEW": "SigNoz | New Alert", - "ALL_CHANNELS": "SigNoz | All Channels", - "CHANNELS_NEW": "SigNoz | New Channel", - "CHANNELS_EDIT": "SigNoz | Edit Channel", - "ALL_ERROR": "SigNoz | All Errors", - "ERROR_DETAIL": "SigNoz | Error Detail", - "VERSION": "SigNoz | Version", - "MY_SETTINGS": "SigNoz | My Settings", - "ORG_SETTINGS": "SigNoz | Organization Settings", - "INGESTION_SETTINGS": "SigNoz | Ingestion Settings", - "SOMETHING_WENT_WRONG": "SigNoz | Something Went Wrong", - "UN_AUTHORIZED": "SigNoz | Unauthorized", - "NOT_FOUND": "SigNoz | Page Not Found", - "LOGS": "SigNoz | Logs", - "LOGS_EXPLORER": "SigNoz | Logs Explorer", - "LIVE_LOGS": "SigNoz | Live Logs", - "HOME_PAGE": "Open source Observability Platform | SigNoz", - "PASSWORD_RESET": "SigNoz | Password Reset", - "LIST_LICENSES": "SigNoz | List of Licenses", - "DEFAULT": "Open source Observability Platform | SigNoz" -} +{ + "SIGN_UP": "SigNoz | Sign Up", + "LOGIN": "SigNoz | Login", + "SERVICE_METRICS": "SigNoz | Service Metrics", + "SERVICE_MAP": "SigNoz | Service Map", + "GET_STARTED": "SigNoz | Get Started", + "TRACE": "SigNoz | Trace", + "TRACE_DETAIL": "SigNoz | Trace Detail", + "TRACES_EXPLORER": "SigNoz | Traces Explorer", + "SETTINGS": "SigNoz | Settings", + "USAGE_EXPLORER": "SigNoz | Usage Explorer", + "APPLICATION": "SigNoz | Home", + "BILLING": "SigNoz | Billing", + 
"ALL_DASHBOARD": "SigNoz | All Dashboards", + "DASHBOARD": "SigNoz | Dashboard", + "DASHBOARD_WIDGET": "SigNoz | Dashboard Widget", + "EDIT_ALERTS": "SigNoz | Edit Alerts", + "LIST_ALL_ALERT": "SigNoz | All Alerts", + "ALERTS_NEW": "SigNoz | New Alert", + "ALL_CHANNELS": "SigNoz | All Channels", + "CHANNELS_NEW": "SigNoz | New Channel", + "CHANNELS_EDIT": "SigNoz | Edit Channel", + "ALL_ERROR": "SigNoz | All Errors", + "ERROR_DETAIL": "SigNoz | Error Detail", + "VERSION": "SigNoz | Version", + "MY_SETTINGS": "SigNoz | My Settings", + "ORG_SETTINGS": "SigNoz | Organization Settings", + "INGESTION_SETTINGS": "SigNoz | Ingestion Settings", + "SOMETHING_WENT_WRONG": "SigNoz | Something Went Wrong", + "UN_AUTHORIZED": "SigNoz | Unauthorized", + "NOT_FOUND": "SigNoz | Page Not Found", + "LOGS": "SigNoz | Logs", + "LOGS_EXPLORER": "SigNoz | Logs Explorer", + "LIVE_LOGS": "SigNoz | Live Logs", + "HOME_PAGE": "Open source Observability Platform | SigNoz", + "PASSWORD_RESET": "SigNoz | Password Reset", + "LIST_LICENSES": "SigNoz | List of Licenses", + "WORKSPACE_LOCKED": "SigNoz | Workspace Locked", + "DEFAULT": "Open source Observability Platform | SigNoz" +} diff --git a/frontend/src/AppRoutes/Private.tsx b/frontend/src/AppRoutes/Private.tsx index ddfb072d02..70f8cccf04 100644 --- a/frontend/src/AppRoutes/Private.tsx +++ b/frontend/src/AppRoutes/Private.tsx @@ -5,6 +5,7 @@ import { Logout } from 'api/utils'; import Spinner from 'components/Spinner'; import { LOCALSTORAGE } from 'constants/localStorage'; import ROUTES from 'constants/routes'; +import useLicense from 'hooks/useLicense'; import { useNotifications } from 'hooks/useNotifications'; import history from 'lib/history'; import { ReactChild, useEffect, useMemo } from 'react'; @@ -37,13 +38,18 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element { ), [pathname], ); + + const { data: licensesData } = useLicense(); + const { + user, isUserFetching, isUserFetchingError, isLoggedIn: isLoggedInState, } = useSelector((state) => state.app); const { t } = useTranslation(['common']); + const localStorageUserAuthToken = getInitialUserTokenRefreshToken(); const dispatch = useDispatch>(); @@ -51,6 +57,9 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element { const currentRoute = mapRoutes.get('current'); + const isLocalStorageLoggedIn = + getLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN) === 'true'; + const navigateToLoginIfNotLoggedIn = (isLoggedIn = isLoggedInState): void => { dispatch({ type: UPDATE_USER_IS_FETCH, @@ -64,58 +73,87 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element { } }; + const handleUserLoginIfTokenPresent = async ( + key: keyof typeof ROUTES, + ): Promise => { + if (localStorageUserAuthToken?.refreshJwt) { + // localstorage token is present + + // renew web access token + const response = await loginApi({ + refreshToken: localStorageUserAuthToken?.refreshJwt, + }); + + if (response.statusCode === 200) { + const route = routePermission[key]; + + // get all resource and put it over redux + const userResponse = await afterLogin( + response.payload.userId, + response.payload.accessJwt, + response.payload.refreshJwt, + ); + + if ( + userResponse && + route.find((e) => e === userResponse.payload.role) === undefined + ) { + history.push(ROUTES.UN_AUTHORIZED); + } + } else { + Logout(); + + notifications.error({ + message: response.error || t('something_went_wrong'), + }); + } + } + }; + + const handlePrivateRoutes = async ( + key: keyof typeof ROUTES, + ): Promise => { + if ( + 
localStorageUserAuthToken && + localStorageUserAuthToken.refreshJwt && + user?.userId === '' + ) { + handleUserLoginIfTokenPresent(key); + } else { + // user does have localstorage values + + navigateToLoginIfNotLoggedIn(isLocalStorageLoggedIn); + } + }; + + const navigateToWorkSpaceBlocked = (route: any): void => { + const { path } = route; + + if (path && path !== ROUTES.WORKSPACE_LOCKED) { + history.push(ROUTES.WORKSPACE_LOCKED); + } + + dispatch({ + type: UPDATE_USER_IS_FETCH, + payload: { + isUserFetching: false, + }, + }); + }; + // eslint-disable-next-line sonarjs/cognitive-complexity useEffect(() => { (async (): Promise => { try { - const isLocalStorageLoggedIn = - getLocalStorageApi(LOCALSTORAGE.IS_LOGGED_IN) === 'true'; + const shouldBlockWorkspace = licensesData?.payload?.workSpaceBlock; + if (currentRoute) { const { isPrivate, key } = currentRoute; - if (isPrivate) { - const localStorageUserAuthToken = getInitialUserTokenRefreshToken(); - - if ( - localStorageUserAuthToken && - localStorageUserAuthToken.refreshJwt && - isUserFetching - ) { - // localstorage token is present - const { refreshJwt } = localStorageUserAuthToken; - - // renew web access token - const response = await loginApi({ - refreshToken: refreshJwt, - }); - - if (response.statusCode === 200) { - const route = routePermission[key]; - - // get all resource and put it over redux - const userResponse = await afterLogin( - response.payload.userId, - response.payload.accessJwt, - response.payload.refreshJwt, - ); - - if ( - userResponse && - route.find((e) => e === userResponse.payload.role) === undefined - ) { - history.push(ROUTES.UN_AUTHORIZED); - } - } else { - Logout(); - - notifications.error({ - message: response.error || t('something_went_wrong'), - }); - } - } else { - // user does have localstorage values - navigateToLoginIfNotLoggedIn(isLocalStorageLoggedIn); - } + if (shouldBlockWorkspace) { + navigateToWorkSpaceBlocked(currentRoute); + } else if (isPrivate) { + handlePrivateRoutes(key); } else { // no need to fetch the user and make user fetching false @@ -145,7 +183,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element { history.push(ROUTES.SOMETHING_WENT_WRONG); } })(); - }, [dispatch, isLoggedInState, currentRoute]); + }, [dispatch, isLoggedInState, currentRoute, licensesData]); if (isUserFetchingError) { return ; diff --git a/frontend/src/AppRoutes/index.tsx b/frontend/src/AppRoutes/index.tsx index c2a0db3da1..a2330d1aeb 100644 --- a/frontend/src/AppRoutes/index.tsx +++ b/frontend/src/AppRoutes/index.tsx @@ -9,6 +9,7 @@ import ROUTES from 'constants/routes'; import AppLayout from 'container/AppLayout'; import { useThemeConfig } from 'hooks/useDarkMode'; import useGetFeatureFlag from 'hooks/useGetFeatureFlag'; +import useLicense, { LICENSE_PLAN_KEY } from 'hooks/useLicense'; import { NotificationProvider } from 'hooks/useNotifications'; import { ResourceProvider } from 'hooks/useResourceAttribute'; import history from 'lib/history'; @@ -29,8 +30,9 @@ import defaultRoutes from './routes'; function App(): JSX.Element { const themeConfig = useThemeConfig(); + const { data } = useLicense(); const [routes, setRoutes] = useState(defaultRoutes); - const { isLoggedIn: isLoggedInState, user } = useSelector< + const { role, isLoggedIn: isLoggedInState, user } = useSelector< AppState, AppReducer >((state) => state.app); @@ -78,6 +80,12 @@ function App(): JSX.Element { } }); + const isOnBasicPlan = + data?.payload?.licenses?.some( + (license) => + license.isCurrent && license.planKey === 
LICENSE_PLAN_KEY.BASIC_PLAN, + ) || data?.payload?.licenses === null; + useEffect(() => { const isIdentifiedUser = getLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER); @@ -97,8 +105,13 @@ function App(): JSX.Element { window.clarity('identify', user.email, user.name); } + + if (isOnBasicPlan || (isLoggedInState && role && role !== 'ADMIN')) { + const newRoutes = routes.filter((route) => route?.path !== ROUTES.BILLING); + setRoutes(newRoutes); + } // eslint-disable-next-line react-hooks/exhaustive-deps - }, [isLoggedInState, user]); + }, [isLoggedInState, isOnBasicPlan, user]); useEffect(() => { trackPageView(pathname); diff --git a/frontend/src/AppRoutes/pageComponents.ts b/frontend/src/AppRoutes/pageComponents.ts index 3852153da8..ad33f3a83c 100644 --- a/frontend/src/AppRoutes/pageComponents.ts +++ b/frontend/src/AppRoutes/pageComponents.ts @@ -153,3 +153,12 @@ export const LogsIndexToFields = Loadable( export const PipelinePage = Loadable( () => import(/* webpackChunkName: "Pipelines" */ 'pages/Pipelines'), ); + +export const BillingPage = Loadable( + () => import(/* webpackChunkName: "BillingPage" */ 'pages/Billing'), +); + +export const WorkspaceBlocked = Loadable( + () => + import(/* webpackChunkName: "WorkspaceLocked" */ 'pages/WorkspaceLocked'), +); diff --git a/frontend/src/AppRoutes/routes.ts b/frontend/src/AppRoutes/routes.ts index 0c0f5ae9cb..dfda9f8312 100644 --- a/frontend/src/AppRoutes/routes.ts +++ b/frontend/src/AppRoutes/routes.ts @@ -1,9 +1,11 @@ import ROUTES from 'constants/routes'; +import WorkspaceBlocked from 'pages/WorkspaceLocked'; import { RouteProps } from 'react-router-dom'; import { AllAlertChannels, AllErrors, + BillingPage, CreateAlertChannelAlerts, CreateNewAlerts, DashboardPage, @@ -285,6 +287,21 @@ const routes: AppRoutes[] = [ key: 'PIPELINES', isPrivate: true, }, + + { + path: ROUTES.BILLING, + exact: true, + component: BillingPage, + key: 'BILLING', + isPrivate: true, + }, + { + path: ROUTES.WORKSPACE_LOCKED, + exact: true, + component: WorkspaceBlocked, + isPrivate: false, + key: 'WORKSPACE_LOCKED', + }, ]; export interface AppRoutes { diff --git a/frontend/src/api/billing/checkout.ts b/frontend/src/api/billing/checkout.ts new file mode 100644 index 0000000000..e6c7640629 --- /dev/null +++ b/frontend/src/api/billing/checkout.ts @@ -0,0 +1,31 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { + CheckoutRequestPayloadProps, + CheckoutSuccessPayloadProps, +} from 'types/api/billing/checkout'; + +const updateCreditCardApi = async ( + props: CheckoutRequestPayloadProps, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/checkout', { + licenseKey: props.licenseKey, + successURL: props.successURL, + cancelURL: props.cancelURL, // temp + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default updateCreditCardApi; diff --git a/frontend/src/api/billing/getUsage.ts b/frontend/src/api/billing/getUsage.ts new file mode 100644 index 0000000000..1cb5be5640 --- /dev/null +++ b/frontend/src/api/billing/getUsage.ts @@ -0,0 +1,35 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; + +export 
interface UsageResponsePayloadProps { + billingPeriodStart: Date; + billingPeriodEnd: Date; + details: { + total: number; + baseFee: number; + breakdown: []; + billTotal: number; + }; + discount: number; +} + +const getUsage = async ( + licenseKey: string, +): Promise | ErrorResponse> => { + try { + const response = await axios.get(`/billing?licenseKey=${licenseKey}`); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default getUsage; diff --git a/frontend/src/api/dashboard/create.ts b/frontend/src/api/dashboard/create.ts index 3796eb685e..bf5458ac40 100644 --- a/frontend/src/api/dashboard/create.ts +++ b/frontend/src/api/dashboard/create.ts @@ -4,7 +4,7 @@ import { AxiosError } from 'axios'; import { ErrorResponse, SuccessResponse } from 'types/api'; import { PayloadProps, Props } from 'types/api/dashboard/create'; -const create = async ( +const createDashboard = async ( props: Props, ): Promise | ErrorResponse> => { const url = props.uploadedGrafana ? '/dashboards/grafana' : '/dashboards'; @@ -24,4 +24,4 @@ const create = async ( } }; -export default create; +export default createDashboard; diff --git a/frontend/src/api/licenses/getAll.ts b/frontend/src/api/licenses/getAll.ts index bce8c6b1b6..4782be323f 100644 --- a/frontend/src/api/licenses/getAll.ts +++ b/frontend/src/api/licenses/getAll.ts @@ -1,4 +1,4 @@ -import axios from 'api'; +import { ApiV2Instance as axios } from 'api'; import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; import { AxiosError } from 'axios'; import { ErrorResponse, SuccessResponse } from 'types/api'; diff --git a/frontend/src/assets/NotFound.tsx b/frontend/src/assets/NotFound.tsx index 383435cb6a..b8bf4d0869 100644 --- a/frontend/src/assets/NotFound.tsx +++ b/frontend/src/assets/NotFound.tsx @@ -1,263 +1,13 @@ function NotFound(): JSX.Element { return ( - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + not-found ); } diff --git a/frontend/src/components/NotFound/__snapshots__/NotFound.test.tsx.snap b/frontend/src/components/NotFound/__snapshots__/NotFound.test.tsx.snap index cd16f3163a..5415d86836 100644 --- a/frontend/src/components/NotFound/__snapshots__/NotFound.test.tsx.snap +++ b/frontend/src/components/NotFound/__snapshots__/NotFound.test.tsx.snap @@ -99,272 +99,11 @@ exports[`Not Found page test should render Not Found page without errors 1`] = `
-      [large block of inline SVG markup, removed from the snapshot]
+      not-found
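
For orientation, a minimal sketch of how the two new billing API clients introduced above (api/billing/checkout and api/billing/getUsage) might be consumed with react-query. The hook name and the window.open redirect are illustrative assumptions only, not part of this patch; the actual wiring is done in BillingContainer further below.

import { useMutation, useQuery } from 'react-query';

import updateCreditCardApi from 'api/billing/checkout';
import getUsage from 'api/billing/getUsage';

// Hypothetical hook, for illustration only.
function useBillingSketch(licenseKey: string) {
	// Fetch usage for the given license key; resolves to SuccessResponse | ErrorResponse.
	const usage = useQuery(['GET_BILLING_USAGE', licenseKey], () =>
		getUsage(licenseKey),
	);

	// Kick off a checkout session; a successful response carries a redirectURL.
	const checkout = useMutation(updateCreditCardApi, {
		onSuccess: (res) => {
			if (res.payload?.redirectURL) {
				// Simplified here; the patch itself clicks a programmatically created anchor.
				window.open(res.payload.redirectURL, '_blank', 'noopener,noreferrer');
			}
		},
	});

	return { usage, checkout };
}

// Usage (hypothetical):
// const { checkout } = useBillingSketch(activeLicense?.key || '');
// checkout.mutate({ licenseKey, successURL: window.location.href, cancelURL: window.location.href });
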
diff --git a/frontend/src/constants/reactQueryKeys.ts b/frontend/src/constants/reactQueryKeys.ts index ec55889516..63fc205d81 100644 --- a/frontend/src/constants/reactQueryKeys.ts +++ b/frontend/src/constants/reactQueryKeys.ts @@ -4,6 +4,7 @@ export const REACT_QUERY_KEY = { GET_ALL_DASHBOARDS: 'GET_ALL_DASHBOARDS', GET_TRIGGERED_ALERTS: 'GET_TRIGGERED_ALERTS', DASHBOARD_BY_ID: 'DASHBOARD_BY_ID', + GET_BILLING_USAGE: 'GET_BILLING_USAGE', GET_FEATURES_FLAGS: 'GET_FEATURES_FLAGS', DELETE_DASHBOARD: 'DELETE_DASHBOARD', LOGS_PIPELINE_PREVIEW: 'LOGS_PIPELINE_PREVIEW', diff --git a/frontend/src/constants/routes.ts b/frontend/src/constants/routes.ts index b156036ce4..a66e7e7b4e 100644 --- a/frontend/src/constants/routes.ts +++ b/frontend/src/constants/routes.ts @@ -38,6 +38,8 @@ const ROUTES = { LOGS_PIPELINE: '/logs-explorer/pipeline', TRACE_EXPLORER: '/trace-explorer', PIPELINES: '/pipelines', + BILLING: '/billing', + WORKSPACE_LOCKED: '/workspace-locked', }; export default ROUTES; diff --git a/frontend/src/container/AppLayout/index.tsx b/frontend/src/container/AppLayout/index.tsx index b4dddf1a7b..b47885cf7b 100644 --- a/frontend/src/container/AppLayout/index.tsx +++ b/frontend/src/container/AppLayout/index.tsx @@ -191,7 +191,8 @@ function AppLayout(props: AppLayoutProps): JSX.Element { const routeKey = useMemo(() => getRouteKey(pathname), [pathname]); const pageTitle = t(routeKey); - const renderFullScreen = pathname === ROUTES.GET_STARTED; + const renderFullScreen = + pathname === ROUTES.GET_STARTED || pathname === ROUTES.WORKSPACE_LOCKED; return ( diff --git a/frontend/src/container/BillingContainer/BillingContainer.styles.scss b/frontend/src/container/BillingContainer/BillingContainer.styles.scss new file mode 100644 index 0000000000..afb9e80253 --- /dev/null +++ b/frontend/src/container/BillingContainer/BillingContainer.styles.scss @@ -0,0 +1,36 @@ +.billing-container { + padding: 16px 0; + width: 100%; + + .billing-summary { + margin: 24px 8px; + } + + .billing-details { + margin: 36px 8px; + } + + .upgrade-plan-benefits { + margin: 0px 8px; + border: 1px solid #333; + border-radius: 5px; + padding: 0 48px; + .plan-benefits { + .plan-benefit { + display: flex; + align-items: center; + gap: 16px; + margin: 16px 0; + } + } + } +} + +.ant-skeleton.ant-skeleton-element.ant-skeleton-active { + width: 100%; + min-width: 100%; +} + +.ant-skeleton.ant-skeleton-element .ant-skeleton-input { + min-width: 100% !important; +} diff --git a/frontend/src/container/BillingContainer/BillingContainer.tsx b/frontend/src/container/BillingContainer/BillingContainer.tsx new file mode 100644 index 0000000000..aa674e780a --- /dev/null +++ b/frontend/src/container/BillingContainer/BillingContainer.tsx @@ -0,0 +1,432 @@ +/* eslint-disable @typescript-eslint/no-loop-func */ +import './BillingContainer.styles.scss'; + +import { CheckCircleOutlined } from '@ant-design/icons'; +import { Button, Col, Row, Skeleton, Table, Tag, Typography } from 'antd'; +import { ColumnsType } from 'antd/es/table'; +import updateCreditCardApi from 'api/billing/checkout'; +import getUsage from 'api/billing/getUsage'; +import { SOMETHING_WENT_WRONG } from 'constants/api'; +import { REACT_QUERY_KEY } from 'constants/reactQueryKeys'; +import useAxiosError from 'hooks/useAxiosError'; +import useLicense from 'hooks/useLicense'; +import { useNotifications } from 'hooks/useNotifications'; +import { useCallback, useEffect, useState } from 'react'; +import { useMutation, useQuery } from 'react-query'; +import { useSelector } from 'react-redux'; 
+import { AppState } from 'store/reducers'; +import { License } from 'types/api/licenses/def'; +import AppReducer from 'types/reducer/app'; + +interface DataType { + key: string; + name: string; + unit: string; + dataIngested: string; + pricePerUnit: string; + cost: string; +} + +const renderSkeletonInput = (): JSX.Element => ( + +); + +const dummyData: DataType[] = [ + { + key: '1', + name: 'Logs', + unit: '', + dataIngested: '', + pricePerUnit: '', + cost: '', + }, + { + key: '2', + name: 'Traces', + unit: '', + dataIngested: '', + pricePerUnit: '', + cost: '', + }, + { + key: '3', + name: 'Metrics', + unit: '', + dataIngested: '', + pricePerUnit: '', + cost: '', + }, +]; + +const dummyColumns: ColumnsType = [ + { + title: '', + dataIndex: 'name', + key: 'name', + render: renderSkeletonInput, + }, + { + title: 'Unit', + dataIndex: 'unit', + key: 'unit', + render: renderSkeletonInput, + }, + { + title: 'Data Ingested', + dataIndex: 'dataIngested', + key: 'dataIngested', + render: renderSkeletonInput, + }, + { + title: 'Price per Unit', + dataIndex: 'pricePerUnit', + key: 'pricePerUnit', + render: renderSkeletonInput, + }, + { + title: 'Cost (Billing period to date)', + dataIndex: 'cost', + key: 'cost', + render: renderSkeletonInput, + }, +]; + +export const getRemainingDays = (billingEndDate: number): number => { + // Convert Epoch timestamps to Date objects + const startDate = new Date(); // Convert seconds to milliseconds + const endDate = new Date(billingEndDate * 1000); // Convert seconds to milliseconds + + // Calculate the time difference in milliseconds + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + const timeDifference = endDate - startDate; + + return Math.ceil(timeDifference / (1000 * 60 * 60 * 24)); +}; + +export const getFormattedDate = (date?: number): string => { + if (!date) { + return new Date().toLocaleDateString(); + } + const trialEndDate = new Date(date * 1000); + + const options = { day: 'numeric', month: 'short', year: 'numeric' }; + + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + return trialEndDate.toLocaleDateString(undefined, options); +}; + +export default function BillingContainer(): JSX.Element { + const daysRemainingStr = 'days remaining in your billing period.'; + const [headerText, setHeaderText] = useState(''); + const [billAmount, setBillAmount] = useState(0); + const [totalBillAmount, setTotalBillAmount] = useState(0); + const [activeLicense, setActiveLicense] = useState(null); + const [daysRemaining, setDaysRemaining] = useState(0); + const [isFreeTrial, setIsFreeTrial] = useState(false); + const [data, setData] = useState([]); + const billCurrency = '$'; + + const { isFetching, data: licensesData, error: licenseError } = useLicense(); + + const { user } = useSelector((state) => state.app); + const { notifications } = useNotifications(); + + const handleError = useAxiosError(); + + const { isLoading, data: usageData } = useQuery( + [REACT_QUERY_KEY.GET_BILLING_USAGE, user?.userId], + { + queryFn: () => getUsage(activeLicense?.key || ''), + onError: handleError, + enabled: activeLicense !== null, + }, + ); + + useEffect(() => { + const activeValidLicense = + licensesData?.payload?.licenses?.find( + (license) => license.isCurrent === true, + ) || null; + + setActiveLicense(activeValidLicense); + + if (!isFetching && licensesData?.payload?.onTrial && !licenseError) { + setIsFreeTrial(true); + setBillAmount(0); + setDaysRemaining(getRemainingDays(licensesData?.payload?.trialEnd)); + 
setHeaderText( + `You are in free trial period. Your free trial will end on ${getFormattedDate( + licensesData?.payload?.trialEnd, + )}`, + ); + } + }, [isFetching, licensesData?.payload, licenseError]); + + const processUsageData = useCallback( + (data: any): void => { + const { + details: { breakdown = [], total, billTotal }, + billingPeriodStart, + billingPeriodEnd, + } = data?.payload || {}; + const formattedUsageData: any[] = []; + + for (let index = 0; index < breakdown.length; index += 1) { + const element = breakdown[index]; + + element?.tiers.forEach( + ( + tier: { quantity: number; unitPrice: number; tierCost: number }, + i: number, + ) => { + formattedUsageData.push({ + key: `${index}${i}`, + name: i === 0 ? element?.type : '', + unit: element?.unit, + dataIngested: tier.quantity, + pricePerUnit: tier.unitPrice, + cost: `$ ${tier.tierCost}`, + }); + }, + ); + } + + setData(formattedUsageData); + setTotalBillAmount(total); + + if (!licensesData?.payload?.onTrial) { + setHeaderText( + `Your current billing period is from ${getFormattedDate( + billingPeriodStart, + )} to ${getFormattedDate(billingPeriodEnd)}`, + ); + setDaysRemaining(getRemainingDays(billingPeriodEnd) - 1); + setBillAmount(billTotal); + } + }, + [licensesData?.payload?.onTrial], + ); + + useEffect(() => { + if (!isLoading && usageData) { + processUsageData(usageData); + } + }, [isLoading, processUsageData, usageData]); + + const columns: ColumnsType = [ + { + title: '', + dataIndex: 'name', + key: 'name', + render: (text): JSX.Element =>
{text}
, + }, + { + title: 'Unit', + dataIndex: 'unit', + key: 'unit', + }, + { + title: 'Data Ingested', + dataIndex: 'dataIngested', + key: 'dataIngested', + }, + { + title: 'Price per Unit', + dataIndex: 'pricePerUnit', + key: 'pricePerUnit', + }, + { + title: 'Cost (Billing period to date)', + dataIndex: 'cost', + key: 'cost', + }, + ]; + + const renderSummary = (): JSX.Element => ( + + + + Total + + +   +   +   + + + ${totalBillAmount} + + + + ); + + const renderTableSkeleton = (): JSX.Element => ( + ( + + )), + }} + /> + ); + + const { mutate: updateCreditCard, isLoading: isLoadingBilling } = useMutation( + updateCreditCardApi, + { + onSuccess: (data) => { + if (data.payload?.redirectURL) { + const newTab = document.createElement('a'); + newTab.href = data.payload.redirectURL; + newTab.target = '_blank'; + newTab.rel = 'noopener noreferrer'; + newTab.click(); + } + }, + onError: () => + notifications.error({ + message: SOMETHING_WENT_WRONG, + }), + }, + ); + + const handleBilling = useCallback(async () => { + updateCreditCard({ + licenseKey: activeLicense?.key || '', + successURL: window.location.href, + cancelURL: window.location.href, + }); + }, [activeLicense?.key, updateCreditCard]); + + return ( +
+ +
+ + {headerText} + + + {licensesData?.payload?.onTrial && + licensesData?.payload?.trialConvertedToSubscription && ( + + We have received your card details, your billing will only start after + the end of your free trial period. + + )} + + + + + + + +
+ + Current bill total + + + + {billCurrency} + {billAmount}   + {isFreeTrial ? Free Trial : ''} + + + + {daysRemaining} {daysRemainingStr} + +
+ +
+ {!isLoading && ( +
+ )} + + {isLoading && renderTableSkeleton()} + + + {isFreeTrial && !licensesData?.payload?.trialConvertedToSubscription && ( +
+ +
+ + + Upgrade now to have uninterrupted access + + + + You will be charged only when trial period ends + + + + + Check out features in paid plans   + + here + + + + + + + + + + )} + + ); +} diff --git a/frontend/src/container/Header/Header.styles.scss b/frontend/src/container/Header/Header.styles.scss new file mode 100644 index 0000000000..82dd9b81ff --- /dev/null +++ b/frontend/src/container/Header/Header.styles.scss @@ -0,0 +1,12 @@ +.trial-expiry-banner { + padding: 8px; + background-color: #f25733; + color: white; + text-align: center; +} + +.upgrade-link { + padding: 0px; + padding-right: 4px; + color: white; +} diff --git a/frontend/src/container/Header/ManageLicense/index.tsx b/frontend/src/container/Header/ManageLicense/index.tsx index 377af48103..fee671f641 100644 --- a/frontend/src/container/Header/ManageLicense/index.tsx +++ b/frontend/src/container/Header/ManageLicense/index.tsx @@ -21,7 +21,7 @@ function ManageLicense({ onToggle }: ManageLicenseProps): JSX.Element { return ; } - const isEnterprise = data?.payload?.some( + const isEnterprise = data?.payload?.licenses?.some( (license) => license.isCurrent && license.planKey === LICENSE_PLAN_KEY.ENTERPRISE_PLAN, ); diff --git a/frontend/src/container/Header/index.tsx b/frontend/src/container/Header/index.tsx index ae98295ada..d2463a5e76 100644 --- a/frontend/src/container/Header/index.tsx +++ b/frontend/src/container/Header/index.tsx @@ -1,3 +1,5 @@ +import './Header.styles.scss'; + import { CaretDownFilled, CaretUpFilled, @@ -6,14 +8,20 @@ import { import { Button, Divider, MenuProps, Space, Typography } from 'antd'; import { Logout } from 'api/utils'; import ROUTES from 'constants/routes'; +import { + getFormattedDate, + getRemainingDays, +} from 'container/BillingContainer/BillingContainer'; import Config from 'container/ConfigDropdown'; import { useIsDarkMode, useThemeMode } from 'hooks/useDarkMode'; import useLicense, { LICENSE_PLAN_STATUS } from 'hooks/useLicense'; +import history from 'lib/history'; import { Dispatch, KeyboardEvent, SetStateAction, useCallback, + useEffect, useMemo, useState, } from 'react'; @@ -37,11 +45,13 @@ import { } from './styles'; function HeaderContainer(): JSX.Element { - const { user, currentVersion } = useSelector( + const { user, role, currentVersion } = useSelector( (state) => state.app, ); const isDarkMode = useIsDarkMode(); const { toggleTheme } = useThemeMode(); + const [showTrialExpiryBanner, setShowTrialExpiryBanner] = useState(false); + const [homeRoute, setHomeRoute] = useState(ROUTES.APPLICATION); const [isUserDropDownOpen, setIsUserDropDownOpen] = useState(false); @@ -97,58 +107,100 @@ function HeaderContainer(): JSX.Element { ); }; - const { data } = useLicense(); + const { data: licenseData, isFetching } = useLicense(); const isLicenseActive = - data?.payload?.find((e) => e.isCurrent)?.status === LICENSE_PLAN_STATUS.VALID; + licenseData?.payload?.licenses?.find((e) => e.isCurrent)?.status === + LICENSE_PLAN_STATUS.VALID; + + useEffect(() => { + if ( + !isFetching && + licenseData?.payload?.onTrial && + !licenseData?.payload?.trialConvertedToSubscription && + getRemainingDays(licenseData?.payload.trialEnd) < 7 + ) { + setShowTrialExpiryBanner(true); + } + + if (!isFetching && licenseData?.payload?.workSpaceBlock) { + setHomeRoute(ROUTES.WORKSPACE_LOCKED); + } + }, [licenseData, isFetching]); + + const handleUpgrade = (): void => { + if (role === 'ADMIN') { + history.push(ROUTES.BILLING); + } + }; return ( -
- - - - SigNoz - - SigNoz - - - - - - {!isLicenseActive && ( - + <> + {showTrialExpiryBanner && ( +
+ You are in free trial period. Your free trial will end on{' '} + {getFormattedDate(licenseData?.payload?.trialEnd)}. + {role === 'ADMIN' ? ( + + Please{' '} + + to continue using SigNoz features. + + ) : ( + 'Please contact your administrator for upgrading to a paid plan.' )} - +
+ )} - +
+ + + + SigNoz + + SigNoz + + + - - - {user?.name[0]} - - {!isUserDropDownOpen ? : } - - - - - -
+ + {!isLicenseActive && ( + + )} + + + + + + + {user?.name[0]} + + {!isUserDropDownOpen ? : } + + + + +
+
+ ); } diff --git a/frontend/src/container/Licenses/ListLicenses.tsx b/frontend/src/container/Licenses/ListLicenses.tsx index d0ca5f0782..02d3abbb65 100644 --- a/frontend/src/container/Licenses/ListLicenses.tsx +++ b/frontend/src/container/Licenses/ListLicenses.tsx @@ -2,7 +2,6 @@ import { ColumnsType } from 'antd/lib/table'; import { ResizeTable } from 'components/ResizeTable'; import { useTranslation } from 'react-i18next'; import { License } from 'types/api/licenses/def'; -import { PayloadProps } from 'types/api/licenses/getAll'; function ListLicenses({ licenses }: ListLicensesProps): JSX.Element { const { t } = useTranslation(['licenses']); @@ -38,7 +37,7 @@ function ListLicenses({ licenses }: ListLicensesProps): JSX.Element { } interface ListLicensesProps { - licenses: PayloadProps; + licenses: License[]; } export default ListLicenses; diff --git a/frontend/src/container/Licenses/index.tsx b/frontend/src/container/Licenses/index.tsx index b4d068d908..351d78a636 100644 --- a/frontend/src/container/Licenses/index.tsx +++ b/frontend/src/container/Licenses/index.tsx @@ -19,7 +19,7 @@ function Licenses(): JSX.Element { } const allValidLicense = - data?.payload?.filter((license) => license.isCurrent) || []; + data?.payload?.licenses?.filter((license) => license.isCurrent) || []; const tabs = [ { diff --git a/frontend/src/container/SideNav/SideNav.tsx b/frontend/src/container/SideNav/SideNav.tsx index 1570e12b70..85ca6295ee 100644 --- a/frontend/src/container/SideNav/SideNav.tsx +++ b/frontend/src/container/SideNav/SideNav.tsx @@ -4,6 +4,7 @@ import getLocalStorageKey from 'api/browser/localstorage/get'; import { IS_SIDEBAR_COLLAPSED } from 'constants/app'; import { FeatureKeys } from 'constants/features'; import ROUTES from 'constants/routes'; +import useLicense, { LICENSE_PLAN_KEY } from 'hooks/useLicense'; import history from 'lib/history'; import { useCallback, useLayoutEffect, useMemo, useState } from 'react'; import { useTranslation } from 'react-i18next'; @@ -34,12 +35,21 @@ function SideNav(): JSX.Element { getLocalStorageKey(IS_SIDEBAR_COLLAPSED) === 'true', ); const { + role, currentVersion, latestVersion, isCurrentVersionError, featureResponse, } = useSelector((state) => state.app); + const { data } = useLicense(); + + const isOnBasicPlan = + data?.payload?.licenses?.some( + (license) => + license.isCurrent && license.planKey === LICENSE_PLAN_KEY.BASIC_PLAN, + ) || data?.payload?.licenses === null; + const { hostname } = window.location; const menuItems = useMemo( @@ -50,6 +60,10 @@ function SideNav(): JSX.Element { (feature) => feature.name === FeatureKeys.ONBOARDING, )?.active || false; + if (role !== 'ADMIN' || isOnBasicPlan) { + return item.key !== ROUTES.BILLING; + } + if ( !isOnboardingEnabled || !(hostname && hostname.endsWith('signoz.cloud')) @@ -59,7 +73,7 @@ function SideNav(): JSX.Element { return true; }), - [featureResponse.data, hostname], + [featureResponse.data, isOnBasicPlan, hostname, role], ); const { pathname, search } = useLocation(); diff --git a/frontend/src/container/SideNav/config.ts b/frontend/src/container/SideNav/config.ts index 4e6b628457..efb221e52f 100644 --- a/frontend/src/container/SideNav/config.ts +++ b/frontend/src/container/SideNav/config.ts @@ -46,4 +46,5 @@ export const routeConfig: Record = { [ROUTES.VERSION]: [QueryParams.resourceAttributes], [ROUTES.TRACE_EXPLORER]: [QueryParams.resourceAttributes], [ROUTES.PIPELINES]: [QueryParams.resourceAttributes], + [ROUTES.WORKSPACE_LOCKED]: [QueryParams.resourceAttributes], }; diff --git 
a/frontend/src/container/SideNav/menuItems.tsx b/frontend/src/container/SideNav/menuItems.tsx index 4121c93ac0..a68cbaf1f4 100644 --- a/frontend/src/container/SideNav/menuItems.tsx +++ b/frontend/src/container/SideNav/menuItems.tsx @@ -5,6 +5,7 @@ import { BugOutlined, DashboardFilled, DeploymentUnitOutlined, + FileDoneOutlined, LineChartOutlined, MenuOutlined, RocketOutlined, @@ -60,6 +61,11 @@ const menuItems: SidebarMenu[] = [ label: 'Usage Explorer', icon: , }, + { + key: ROUTES.BILLING, + label: 'Billing', + icon: , + }, { key: ROUTES.SETTINGS, label: 'Settings', diff --git a/frontend/src/container/TopNav/Breadcrumbs/index.tsx b/frontend/src/container/TopNav/Breadcrumbs/index.tsx index 3c3b88da79..855dd1103d 100644 --- a/frontend/src/container/TopNav/Breadcrumbs/index.tsx +++ b/frontend/src/container/TopNav/Breadcrumbs/index.tsx @@ -24,6 +24,8 @@ const breadcrumbNameMap = { [ROUTES.LOGS_EXPLORER]: 'Logs Explorer', [ROUTES.LIVE_LOGS]: 'Live View', [ROUTES.PIPELINES]: 'Pipelines', + [ROUTES.BILLING]: 'Billing', + [ROUTES.WORKSPACE_LOCKED]: 'Workspace Locked', }; function ShowBreadcrumbs(props: RouteComponentProps): JSX.Element { @@ -50,7 +52,7 @@ function ShowBreadcrumbs(props: RouteComponentProps): JSX.Element { const breadcrumbItems = [ - Home + Home , ].concat(extraBreadcrumbItems); diff --git a/frontend/src/container/TopNav/DateTimeSelection/config.ts b/frontend/src/container/TopNav/DateTimeSelection/config.ts index a085aa9015..d8a438bff8 100644 --- a/frontend/src/container/TopNav/DateTimeSelection/config.ts +++ b/frontend/src/container/TopNav/DateTimeSelection/config.ts @@ -84,6 +84,8 @@ export const routesToSkip = [ ROUTES.EDIT_ALERTS, ROUTES.LIST_ALL_ALERT, ROUTES.PIPELINES, + ROUTES.BILLING, + ROUTES.WORKSPACE_LOCKED, ]; export const routesToDisable = [ROUTES.LOGS_EXPLORER, ROUTES.LIVE_LOGS]; diff --git a/frontend/src/hooks/useLicense/constant.ts b/frontend/src/hooks/useLicense/constant.ts index 03bbb7325c..55f81dac46 100644 --- a/frontend/src/hooks/useLicense/constant.ts +++ b/frontend/src/hooks/useLicense/constant.ts @@ -1,5 +1,6 @@ export const LICENSE_PLAN_KEY = { ENTERPRISE_PLAN: 'ENTERPRISE_PLAN', + BASIC_PLAN: 'BASIC_PLAN ', }; export const LICENSE_PLAN_STATUS = { diff --git a/frontend/src/hooks/useUsage/useUsage.tsx b/frontend/src/hooks/useUsage/useUsage.tsx new file mode 100644 index 0000000000..0abcba5ce1 --- /dev/null +++ b/frontend/src/hooks/useUsage/useUsage.tsx @@ -0,0 +1,25 @@ +import getAll from 'api/licenses/getAll'; +import { REACT_QUERY_KEY } from 'constants/reactQueryKeys'; +import { useQuery, UseQueryResult } from 'react-query'; +import { useSelector } from 'react-redux'; +import { AppState } from 'store/reducers'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { PayloadProps } from 'types/api/licenses/getAll'; +import AppReducer from 'types/reducer/app'; + +const useLicense = (): UseLicense => { + const { user } = useSelector((state) => state.app); + + return useQuery({ + queryFn: getAll, + queryKey: [REACT_QUERY_KEY.GET_ALL_LICENCES, user?.email], + enabled: !!user?.email, + }); +}; + +type UseLicense = UseQueryResult< + SuccessResponse | ErrorResponse, + unknown +>; + +export default useLicense; diff --git a/frontend/src/pages/Billing/BillingPage.styles.scss b/frontend/src/pages/Billing/BillingPage.styles.scss new file mode 100644 index 0000000000..ced1d4d055 --- /dev/null +++ b/frontend/src/pages/Billing/BillingPage.styles.scss @@ -0,0 +1,5 @@ +.billingPageContainer { + display: flex; + width: 100%; + color: #fff; +} diff --git 
a/frontend/src/pages/Billing/BillingPage.tsx b/frontend/src/pages/Billing/BillingPage.tsx new file mode 100644 index 0000000000..ec2123cd4c --- /dev/null +++ b/frontend/src/pages/Billing/BillingPage.tsx @@ -0,0 +1,13 @@ +import './BillingPage.styles.scss'; + +import BillingContainer from 'container/BillingContainer/BillingContainer'; + +function BillingPage(): JSX.Element { + return ( +
+ +
+ ); +} + +export default BillingPage; diff --git a/frontend/src/pages/Billing/index.tsx b/frontend/src/pages/Billing/index.tsx new file mode 100644 index 0000000000..8dad400fe0 --- /dev/null +++ b/frontend/src/pages/Billing/index.tsx @@ -0,0 +1,3 @@ +import BillingPage from './BillingPage'; + +export default BillingPage; diff --git a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss new file mode 100644 index 0000000000..f80a4925bc --- /dev/null +++ b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.styles.scss @@ -0,0 +1,19 @@ +.workspace-locked-container { + text-align: center; + padding: 48px; + margin: 48px; +} + +.workpace-locked-details { + width: 50%; + margin: 0 auto; +} + +.update-credit-card-btn { + margin: 24px 0; + border-radius: 5px; +} + +.contact-us { + margin-top: 48px; +} diff --git a/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx new file mode 100644 index 0000000000..1dfba2694c --- /dev/null +++ b/frontend/src/pages/WorkspaceLocked/WorkspaceLocked.tsx @@ -0,0 +1,97 @@ +/* eslint-disable react/no-unescaped-entities */ +import './WorkspaceLocked.styles.scss'; + +import { CreditCardOutlined, LockOutlined } from '@ant-design/icons'; +import { Button, Card, Typography } from 'antd'; +import updateCreditCardApi from 'api/billing/checkout'; +import { SOMETHING_WENT_WRONG } from 'constants/api'; +import { getFormattedDate } from 'container/BillingContainer/BillingContainer'; +import useLicense from 'hooks/useLicense'; +import { useNotifications } from 'hooks/useNotifications'; +import { useCallback, useEffect, useState } from 'react'; +import { useMutation } from 'react-query'; +import { useSelector } from 'react-redux'; +import { AppState } from 'store/reducers'; +import { License } from 'types/api/licenses/def'; +import AppReducer from 'types/reducer/app'; + +export default function WorkspaceBlocked(): JSX.Element { + const { role } = useSelector((state) => state.app); + const isAdmin = role === 'ADMIN'; + const [activeLicense, setActiveLicense] = useState(null); + + const { notifications } = useNotifications(); + + const { isFetching, data: licensesData } = useLicense(); + + useEffect(() => { + const activeValidLicense = + licensesData?.payload?.licenses?.find( + (license) => license.isCurrent === true, + ) || null; + + setActiveLicense(activeValidLicense); + }, [isFetching, licensesData]); + + const { mutate: updateCreditCard, isLoading } = useMutation( + updateCreditCardApi, + { + onSuccess: (data) => { + if (data.payload?.redirectURL) { + const newTab = document.createElement('a'); + newTab.href = data.payload.redirectURL; + newTab.target = '_blank'; + newTab.rel = 'noopener noreferrer'; + newTab.click(); + } + }, + onError: () => + notifications.error({ + message: SOMETHING_WENT_WRONG, + }), + }, + ); + + const handleUpdateCreditCard = useCallback(async () => { + updateCreditCard({ + licenseKey: activeLicense?.key || '', + successURL: window.location.origin, + cancelURL: window.location.origin, + }); + }, [activeLicense?.key, updateCreditCard]); + + return ( + + + Workspace Locked + + + You have been locked out of your workspace because your trial ended without + an upgrade to a paid plan. Your data will continue to be ingested till{' '} + {getFormattedDate(licensesData?.payload?.gracePeriodEnd)} , at which point + we will drop all the ingested data and terminate the account. 
+ {!isAdmin && 'Please contact your administrator for further help'} + + + {isAdmin && ( + + )} + +
+ Got Questions? + + Contact Us + +
+
+ ); +} diff --git a/frontend/src/pages/WorkspaceLocked/index.tsx b/frontend/src/pages/WorkspaceLocked/index.tsx new file mode 100644 index 0000000000..557461a23a --- /dev/null +++ b/frontend/src/pages/WorkspaceLocked/index.tsx @@ -0,0 +1,3 @@ +import WorkspaceLocked from './WorkspaceLocked'; + +export default WorkspaceLocked; diff --git a/frontend/src/types/api/billing/checkout.ts b/frontend/src/types/api/billing/checkout.ts new file mode 100644 index 0000000000..b299b3ef84 --- /dev/null +++ b/frontend/src/types/api/billing/checkout.ts @@ -0,0 +1,9 @@ +export interface CheckoutSuccessPayloadProps { + redirectURL: string; +} + +export interface CheckoutRequestPayloadProps { + licenseKey: string; + successURL: string; + cancelURL: string; +} diff --git a/frontend/src/types/api/licenses/getAll.ts b/frontend/src/types/api/licenses/getAll.ts index 48a4394f43..95ee48aca5 100644 --- a/frontend/src/types/api/licenses/getAll.ts +++ b/frontend/src/types/api/licenses/getAll.ts @@ -1,3 +1,11 @@ import { License } from './def'; -export type PayloadProps = License[]; +export type PayloadProps = { + trialStart: number; + trialEnd: number; + onTrial: boolean; + workSpaceBlock: boolean; + trialConvertedToSubscription: boolean; + gracePeriodEnd: number; + licenses: License[]; +}; diff --git a/frontend/src/utils/permission/index.ts b/frontend/src/utils/permission/index.ts index 7b9b82bae7..1ca3064720 100644 --- a/frontend/src/utils/permission/index.ts +++ b/frontend/src/utils/permission/index.ts @@ -64,7 +64,6 @@ export const routePermission: Record = { SERVICE_METRICS: ['ADMIN', 'EDITOR', 'VIEWER'], SETTINGS: ['ADMIN', 'EDITOR', 'VIEWER'], SIGN_UP: ['ADMIN', 'EDITOR', 'VIEWER'], - SOMETHING_WENT_WRONG: ['ADMIN', 'EDITOR', 'VIEWER'], TRACES_EXPLORER: ['ADMIN', 'EDITOR', 'VIEWER'], TRACE: ['ADMIN', 'EDITOR', 'VIEWER'], TRACE_DETAIL: ['ADMIN', 'EDITOR', 'VIEWER'], @@ -80,4 +79,7 @@ export const routePermission: Record = { TRACE_EXPLORER: ['ADMIN', 'EDITOR', 'VIEWER'], PIPELINES: ['ADMIN', 'EDITOR', 'VIEWER'], GET_STARTED: ['ADMIN', 'EDITOR', 'VIEWER'], + WORKSPACE_LOCKED: ['ADMIN', 'EDITOR', 'VIEWER'], + BILLING: ['ADMIN', 'EDITOR', 'VIEWER'], + SOMETHING_WENT_WRONG: ['ADMIN', 'EDITOR', 'VIEWER'], }; From f8d3fa0fdb2bdb7f4f14f994adfb70c7515e47a7 Mon Sep 17 00:00:00 2001 From: Vishal Sharma Date: Tue, 17 Oct 2023 11:49:50 +0530 Subject: [PATCH 05/23] chore: update query service telemetry (#3735) * chore: update query service telemetry * chore: address review comments * chore: add group call and update metrics condition * chore: update company_domain --- ee/query-service/app/server.go | 50 +++++++----- ee/query-service/constants/constants.go | 2 +- pkg/query-service/app/server.go | 49 +++++++----- pkg/query-service/dao/sqlite/connection.go | 1 + pkg/query-service/telemetry/ignored.go | 1 + pkg/query-service/telemetry/telemetry.go | 88 +++++++++++++++++----- 6 files changed, 130 insertions(+), 61 deletions(-) diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go index 5a1dcd9bd5..4d457776a1 100644 --- a/ee/query-service/app/server.go +++ b/ee/query-service/app/server.go @@ -5,7 +5,7 @@ import ( "context" "encoding/json" "fmt" - "io/ioutil" + "io" "net" "net/http" _ "net/http/pprof" // http profiler @@ -20,9 +20,11 @@ import ( "github.com/soheilhy/cmux" "go.signoz.io/signoz/ee/query-service/app/api" "go.signoz.io/signoz/ee/query-service/app/db" + "go.signoz.io/signoz/ee/query-service/constants" "go.signoz.io/signoz/ee/query-service/dao" "go.signoz.io/signoz/ee/query-service/interfaces" 
baseInterface "go.signoz.io/signoz/pkg/query-service/interfaces" + v3 "go.signoz.io/signoz/pkg/query-service/model/v3" licensepkg "go.signoz.io/signoz/ee/query-service/license" "go.signoz.io/signoz/ee/query-service/usage" @@ -198,6 +200,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) { } telemetry.GetInstance().SetReader(reader) + telemetry.GetInstance().SetSaasOperator(constants.SaasSegmentKey) var c cache.Cache if serverOptions.CacheConfigPath != "" { @@ -385,20 +388,20 @@ func (lrw *loggingResponseWriter) Flush() { lrw.ResponseWriter.(http.Flusher).Flush() } -func extractDashboardMetaData(path string, r *http.Request) (map[string]interface{}, bool) { - pathToExtractBodyFrom := "/api/v2/metrics/query_range" +func extractQueryRangeV3Data(path string, r *http.Request) (map[string]interface{}, bool) { + pathToExtractBodyFrom := "/api/v3/query_range" data := map[string]interface{}{} - var postData *basemodel.QueryRangeParamsV2 + var postData *v3.QueryRangeParamsV3 if path == pathToExtractBodyFrom && (r.Method == "POST") { if r.Body != nil { - bodyBytes, err := ioutil.ReadAll(r.Body) + bodyBytes, err := io.ReadAll(r.Body) if err != nil { return nil, false } r.Body.Close() // must close - r.Body = ioutil.NopCloser(bytes.NewBuffer(bodyBytes)) + r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes)) json.Unmarshal(bodyBytes, &postData) } else { @@ -409,24 +412,31 @@ func extractDashboardMetaData(path string, r *http.Request) (map[string]interfac return nil, false } - signozMetricNotFound := false - + signozMetricsUsed := false + signozLogsUsed := false + dataSources := []string{} if postData != nil { - signozMetricNotFound = telemetry.GetInstance().CheckSigNozMetricsV2(postData.CompositeMetricQuery) - if postData.CompositeMetricQuery != nil { - data["queryType"] = postData.CompositeMetricQuery.QueryType - data["panelType"] = postData.CompositeMetricQuery.PanelType + if postData.CompositeQuery != nil { + data["queryType"] = postData.CompositeQuery.QueryType + data["panelType"] = postData.CompositeQuery.PanelType + + signozLogsUsed, signozMetricsUsed = telemetry.GetInstance().CheckSigNozSignals(postData) } - - data["datasource"] = postData.DataSource } - if signozMetricNotFound { - telemetry.GetInstance().AddActiveMetricsUser() - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_DASHBOARDS_METADATA, data, true) + if signozMetricsUsed || signozLogsUsed { + if signozMetricsUsed { + dataSources = append(dataSources, "metrics") + telemetry.GetInstance().AddActiveMetricsUser() + } + if signozLogsUsed { + dataSources = append(dataSources, "logs") + telemetry.GetInstance().AddActiveLogsUser() + } + data["dataSources"] = dataSources + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_V3, data, true) } - return data, true } @@ -449,7 +459,7 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler { route := mux.CurrentRoute(r) path, _ := route.GetPathTemplate() - dashboardMetadata, metadataExists := extractDashboardMetaData(path, r) + queryRangeV3data, metadataExists := extractQueryRangeV3Data(path, r) getActiveLogs(path, r) lrw := NewLoggingResponseWriter(w) @@ -457,7 +467,7 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler { data := map[string]interface{}{"path": path, "statusCode": lrw.statusCode} if metadataExists { - for key, value := range dashboardMetadata { + for key, value := range queryRangeV3data { data[key] = value } } diff --git a/ee/query-service/constants/constants.go b/ee/query-service/constants/constants.go 
index 4953f4d3eb..aeeea03cf2 100644 --- a/ee/query-service/constants/constants.go +++ b/ee/query-service/constants/constants.go @@ -10,7 +10,7 @@ const ( var LicenseSignozIo = "https://license.signoz.io/api/v1" var LicenseAPIKey = GetOrDefaultEnv("SIGNOZ_LICENSE_API_KEY", "") - +var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "") var SpanLimitStr = GetOrDefaultEnv("SPAN_LIMIT", "5000") func GetOrDefaultEnv(key string, fallback string) string { diff --git a/pkg/query-service/app/server.go b/pkg/query-service/app/server.go index 08fb4e7850..b71b9bde3e 100644 --- a/pkg/query-service/app/server.go +++ b/pkg/query-service/app/server.go @@ -5,7 +5,7 @@ import ( "context" "encoding/json" "fmt" - "io/ioutil" + "io" "net" "net/http" _ "net/http/pprof" // http profiler @@ -24,6 +24,7 @@ import ( "go.signoz.io/signoz/pkg/query-service/app/logparsingpipeline" "go.signoz.io/signoz/pkg/query-service/app/opamp" opAmpModel "go.signoz.io/signoz/pkg/query-service/app/opamp/model" + v3 "go.signoz.io/signoz/pkg/query-service/model/v3" "go.signoz.io/signoz/pkg/query-service/app/explorer" "go.signoz.io/signoz/pkg/query-service/auth" @@ -320,20 +321,21 @@ func (lrw *loggingResponseWriter) WriteHeader(code int) { func (lrw *loggingResponseWriter) Flush() { lrw.ResponseWriter.(http.Flusher).Flush() } -func extractDashboardMetaData(path string, r *http.Request) (map[string]interface{}, bool) { - pathToExtractBodyFrom := "/api/v2/metrics/query_range" + +func extractQueryRangeV3Data(path string, r *http.Request) (map[string]interface{}, bool) { + pathToExtractBodyFrom := "/api/v3/query_range" data := map[string]interface{}{} - var postData *model.QueryRangeParamsV2 + var postData *v3.QueryRangeParamsV3 if path == pathToExtractBodyFrom && (r.Method == "POST") { if r.Body != nil { - bodyBytes, err := ioutil.ReadAll(r.Body) + bodyBytes, err := io.ReadAll(r.Body) if err != nil { return nil, false } r.Body.Close() // must close - r.Body = ioutil.NopCloser(bytes.NewBuffer(bodyBytes)) + r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes)) json.Unmarshal(bodyBytes, &postData) } else { @@ -344,24 +346,31 @@ func extractDashboardMetaData(path string, r *http.Request) (map[string]interfac return nil, false } - signozMetricNotFound := false - + signozMetricsUsed := false + signozLogsUsed := false + dataSources := []string{} if postData != nil { - signozMetricNotFound = telemetry.GetInstance().CheckSigNozMetricsV2(postData.CompositeMetricQuery) - if postData.CompositeMetricQuery != nil { - data["queryType"] = postData.CompositeMetricQuery.QueryType - data["panelType"] = postData.CompositeMetricQuery.PanelType + if postData.CompositeQuery != nil { + data["queryType"] = postData.CompositeQuery.QueryType + data["panelType"] = postData.CompositeQuery.PanelType + + signozLogsUsed, signozMetricsUsed = telemetry.GetInstance().CheckSigNozSignals(postData) } - - data["datasource"] = postData.DataSource } - if signozMetricNotFound { - telemetry.GetInstance().AddActiveMetricsUser() - telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_DASHBOARDS_METADATA, data, true) + if signozMetricsUsed || signozLogsUsed { + if signozMetricsUsed { + dataSources = append(dataSources, "metrics") + telemetry.GetInstance().AddActiveMetricsUser() + } + if signozLogsUsed { + dataSources = append(dataSources, "logs") + telemetry.GetInstance().AddActiveLogsUser() + } + data["dataSources"] = dataSources + telemetry.GetInstance().SendEvent(telemetry.TELEMETRY_EVENT_QUERY_RANGE_V3, data, true) } - return data, true } @@ -384,7 +393,7 @@ func (s 
*Server) analyticsMiddleware(next http.Handler) http.Handler { route := mux.CurrentRoute(r) path, _ := route.GetPathTemplate() - dashboardMetadata, metadataExists := extractDashboardMetaData(path, r) + queryRangeV3data, metadataExists := extractQueryRangeV3Data(path, r) getActiveLogs(path, r) lrw := NewLoggingResponseWriter(w) @@ -392,7 +401,7 @@ func (s *Server) analyticsMiddleware(next http.Handler) http.Handler { data := map[string]interface{}{"path": path, "statusCode": lrw.statusCode} if metadataExists { - for key, value := range dashboardMetadata { + for key, value := range queryRangeV3data { data[key] = value } } diff --git a/pkg/query-service/dao/sqlite/connection.go b/pkg/query-service/dao/sqlite/connection.go index f79d67a122..a7335d6426 100644 --- a/pkg/query-service/dao/sqlite/connection.go +++ b/pkg/query-service/dao/sqlite/connection.go @@ -143,6 +143,7 @@ func (mds *ModelDaoSqlite) initializeOrgPreferences(ctx context.Context) error { telemetry.GetInstance().SetCountUsers(int8(countUsers)) if countUsers > 0 { telemetry.GetInstance().SetCompanyDomain(users[countUsers-1].Email) + telemetry.GetInstance().SetUserEmail(users[countUsers-1].Email) } return nil diff --git a/pkg/query-service/telemetry/ignored.go b/pkg/query-service/telemetry/ignored.go index 6ffacba848..29c06fe1ac 100644 --- a/pkg/query-service/telemetry/ignored.go +++ b/pkg/query-service/telemetry/ignored.go @@ -7,6 +7,7 @@ func IgnoredPaths() map[string]struct{} { "/api/v1/query_range": {}, "/api/v2/metrics/query_range": {}, "/api/v1/health": {}, + "/api/v1/featureFlags": {}, } return ignoredPaths diff --git a/pkg/query-service/telemetry/telemetry.go b/pkg/query-service/telemetry/telemetry.go index 4cb4f501a3..cab790d8a3 100644 --- a/pkg/query-service/telemetry/telemetry.go +++ b/pkg/query-service/telemetry/telemetry.go @@ -15,6 +15,7 @@ import ( "go.signoz.io/signoz/pkg/query-service/constants" "go.signoz.io/signoz/pkg/query-service/interfaces" "go.signoz.io/signoz/pkg/query-service/model" + v3 "go.signoz.io/signoz/pkg/query-service/model/v3" "go.signoz.io/signoz/pkg/query-service/version" "gopkg.in/segmentio/analytics-go.v3" ) @@ -35,7 +36,7 @@ const ( TELEMETRY_EVENT_LANGUAGE = "Language" TELEMETRY_EVENT_LOGS_FILTERS = "Logs Filters" TELEMETRY_EVENT_DISTRIBUTED = "Distributed" - TELEMETRY_EVENT_DASHBOARDS_METADATA = "Dashboards Metadata" + TELEMETRY_EVENT_QUERY_RANGE_V3 = "Query Range V3 Metadata" TELEMETRY_EVENT_ACTIVE_USER = "Active User" TELEMETRY_EVENT_ACTIVE_USER_PH = "Active User V2" ) @@ -74,28 +75,34 @@ func (a *Telemetry) IsSampled() bool { } -func (telemetry *Telemetry) CheckSigNozMetricsV2(compositeQuery *model.CompositeMetricQuery) bool { +func (telemetry *Telemetry) CheckSigNozSignals(postData *v3.QueryRangeParamsV3) (bool, bool) { + signozLogsUsed := false + signozMetricsUsed := false - signozMetricsNotFound := false - - if compositeQuery.BuilderQueries != nil && len(compositeQuery.BuilderQueries) > 0 { - if !strings.Contains(compositeQuery.BuilderQueries["A"].MetricName, "signoz_") && len(compositeQuery.BuilderQueries["A"].MetricName) > 0 { - signozMetricsNotFound = true + if postData.CompositeQuery.QueryType == v3.QueryTypeBuilder { + for _, query := range postData.CompositeQuery.BuilderQueries { + if query.DataSource == v3.DataSourceLogs && len(query.Filters.Items) > 0 { + signozLogsUsed = true + } else if query.DataSource == v3.DataSourceMetrics && + !strings.Contains(query.AggregateAttribute.Key, "signoz_") && + len(query.AggregateAttribute.Key) > 0 { + signozMetricsUsed = true + } + } + } 
else if postData.CompositeQuery.QueryType == v3.QueryTypePromQL { + for _, query := range postData.CompositeQuery.PromQueries { + if !strings.Contains(query.Query, "signoz_") && len(query.Query) > 0 { + signozMetricsUsed = true + } + } + } else if postData.CompositeQuery.QueryType == v3.QueryTypeClickHouseSQL { + for _, query := range postData.CompositeQuery.ClickHouseQueries { + if strings.Contains(query.Query, "signoz_metrics") && len(query.Query) > 0 { + signozMetricsUsed = true + } } } - - if compositeQuery.PromQueries != nil && len(compositeQuery.PromQueries) > 0 { - if !strings.Contains(compositeQuery.PromQueries["A"].Query, "signoz_") && len(compositeQuery.PromQueries["A"].Query) > 0 { - signozMetricsNotFound = true - } - } - if compositeQuery.ClickHouseQueries != nil && len(compositeQuery.ClickHouseQueries) > 0 { - if !strings.Contains(compositeQuery.ClickHouseQueries["A"].Query, "signoz_") && len(compositeQuery.ClickHouseQueries["A"].Query) > 0 { - signozMetricsNotFound = true - } - } - - return signozMetricsNotFound + return signozLogsUsed, signozMetricsUsed } func (telemetry *Telemetry) AddActiveTracesUser() { @@ -116,8 +123,10 @@ func (telemetry *Telemetry) AddActiveLogsUser() { type Telemetry struct { operator analytics.Client + saasOperator analytics.Client phOperator ph.Client ipAddress string + userEmail string isEnabled bool isAnonymous bool distinctId string @@ -249,10 +258,25 @@ func getOutboundIP() string { } func (a *Telemetry) IdentifyUser(user *model.User) { + if user.Email == "admin@admin.com" || user.Email == "admin@signoz.cloud" { + return + } a.SetCompanyDomain(user.Email) + a.SetUserEmail(user.Email) if !a.isTelemetryEnabled() || a.isTelemetryAnonymous() { return } + if a.saasOperator != nil { + a.saasOperator.Enqueue(analytics.Identify{ + UserId: a.userEmail, + Traits: analytics.NewTraits().SetName(user.Name).SetEmail(user.Email), + }) + a.saasOperator.Enqueue(analytics.Group{ + UserId: a.userEmail, + GroupId: a.getCompanyDomain(), + Traits: analytics.NewTraits().Set("company_domain", a.getCompanyDomain()), + }) + } a.operator.Enqueue(analytics.Identify{ UserId: a.ipAddress, @@ -272,6 +296,21 @@ func (a *Telemetry) SetCountUsers(countUsers int8) { a.countUsers = countUsers } +func (a *Telemetry) SetUserEmail(email string) { + a.userEmail = email +} + +func (a *Telemetry) GetUserEmail() string { + return a.userEmail +} + +func (a *Telemetry) SetSaasOperator(saasOperatorKey string) { + if saasOperatorKey == "" { + return + } + a.saasOperator = analytics.New(saasOperatorKey) +} + func (a *Telemetry) SetCompanyDomain(email string) { email_split := strings.Split(email, "@") @@ -342,6 +381,15 @@ func (a *Telemetry) SendEvent(event string, data map[string]interface{}, opts .. 
userId = a.GetDistinctId() } + if a.saasOperator != nil && a.GetUserEmail() != "" && + (event == TELEMETRY_EVENT_NUMBER_OF_SERVICES || event == TELEMETRY_EVENT_ACTIVE_USER) { + a.saasOperator.Enqueue(analytics.Track{ + Event: event, + UserId: a.GetUserEmail(), + Properties: properties, + }) + } + a.operator.Enqueue(analytics.Track{ Event: event, UserId: userId, From 377dbd8aece493023f68e41cb7cd270f54feccdd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 17 Oct 2023 14:24:35 +0530 Subject: [PATCH 06/23] chore(deps): bump go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp (#3752) Bumps [go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp](https://github.com/open-telemetry/opentelemetry-go-contrib) from 0.42.0 to 0.44.0. - [Release notes](https://github.com/open-telemetry/opentelemetry-go-contrib/releases) - [Changelog](https://github.com/open-telemetry/opentelemetry-go-contrib/blob/main/CHANGELOG.md) - [Commits](https://github.com/open-telemetry/opentelemetry-go-contrib/compare/zpages/v0.42.0...zpages/v0.44.0) --- updated-dependencies: - dependency-name: go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Srikanth Chekuri --- go.mod | 8 ++++---- go.sum | 16 ++++++++-------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/go.mod b/go.mod index d4bba0fd35..8cd7a896e3 100644 --- a/go.mod +++ b/go.mod @@ -56,7 +56,7 @@ require ( go.opentelemetry.io/collector/exporter v0.79.0 go.opentelemetry.io/collector/pdata v1.0.0-rcv0014 go.opentelemetry.io/collector/receiver v0.79.0 - go.opentelemetry.io/otel v1.17.0 + go.opentelemetry.io/otel v1.18.0 go.opentelemetry.io/otel/sdk v1.16.0 go.uber.org/multierr v1.11.0 go.uber.org/zap v1.25.0 @@ -161,13 +161,13 @@ require ( go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/collector/featuregate v1.0.0-rcv0012 // indirect go.opentelemetry.io/collector/semconv v0.81.0 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.42.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.44.0 // indirect go.opentelemetry.io/contrib/propagators/b3 v1.17.0 // indirect go.opentelemetry.io/otel/bridge/opencensus v0.39.0 // indirect go.opentelemetry.io/otel/exporters/prometheus v0.39.0 // indirect - go.opentelemetry.io/otel/metric v1.17.0 // indirect + go.opentelemetry.io/otel/metric v1.18.0 // indirect go.opentelemetry.io/otel/sdk/metric v0.39.0 // indirect - go.opentelemetry.io/otel/trace v1.17.0 // indirect + go.opentelemetry.io/otel/trace v1.18.0 // indirect go.opentelemetry.io/proto/otlp v1.0.0 // indirect go.uber.org/atomic v1.11.0 // indirect go.uber.org/goleak v1.2.1 // indirect diff --git a/go.sum b/go.sum index b85b7f25aa..5196c824b5 100644 --- a/go.sum +++ b/go.sum @@ -860,26 +860,26 @@ go.opentelemetry.io/collector/receiver v0.79.0 h1:Ag4hciAYklQWDpKbnmqhfh9zJlUskW go.opentelemetry.io/collector/receiver v0.79.0/go.mod h1:+/xe0VoYl6Mli+KQTZWBR2apqFsbioAAqu7abzKDskI= go.opentelemetry.io/collector/semconv v0.81.0 h1:lCYNNo3powDvFIaTPP2jDKIrBiV1T92NK4QgL/aHYXw= go.opentelemetry.io/collector/semconv v0.81.0/go.mod h1:TlYPtzvsXyHOgr5eATi43qEMqwSmIziivJB2uctKswo= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.42.0 h1:pginetY7+onl4qN1vl0xW/V/v6OBZ0vVdH+esuJgvmM= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp 
v0.42.0/go.mod h1:XiYsayHc36K3EByOO6nbAXnAWbrUxdjUROCEeeROOH8= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.44.0 h1:KfYpVmrjI7JuToy5k8XV3nkapjWx48k4E4JOtVstzQI= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.44.0/go.mod h1:SeQhzAEccGVZVEy7aH87Nh0km+utSpo1pTv6eMMop48= go.opentelemetry.io/contrib/propagators/b3 v1.17.0 h1:ImOVvHnku8jijXqkwCSyYKRDt2YrnGXD4BbhcpfbfJo= go.opentelemetry.io/contrib/propagators/b3 v1.17.0/go.mod h1:IkfUfMpKWmynvvE0264trz0sf32NRTZL4nuAN9AbWRc= go.opentelemetry.io/contrib/zpages v0.42.0 h1:hFscXKQ9PTjyIVmAr6zIV8cMoiEeR9lPIwPVqHi8+5Q= go.opentelemetry.io/contrib/zpages v0.42.0/go.mod h1:qRJBEfB0iwRKrYImq5qfwTolmY8HXvZBRucvhuTVQZw= -go.opentelemetry.io/otel v1.17.0 h1:MW+phZ6WZ5/uk2nd93ANk/6yJ+dVrvNWUjGhnnFU5jM= -go.opentelemetry.io/otel v1.17.0/go.mod h1:I2vmBGtFaODIVMBSTPVDlJSzBDNf93k60E6Ft0nyjo0= +go.opentelemetry.io/otel v1.18.0 h1:TgVozPGZ01nHyDZxK5WGPFB9QexeTMXEH7+tIClWfzs= +go.opentelemetry.io/otel v1.18.0/go.mod h1:9lWqYO0Db579XzVuCKFNPDl4s73Voa+zEck3wHaAYQI= go.opentelemetry.io/otel/bridge/opencensus v0.39.0 h1:YHivttTaDhbZIHuPlg1sWsy2P5gj57vzqPfkHItgbwQ= go.opentelemetry.io/otel/bridge/opencensus v0.39.0/go.mod h1:vZ4537pNjFDXEx//WldAR6Ro2LC8wwmFC76njAXwNPE= go.opentelemetry.io/otel/exporters/prometheus v0.39.0 h1:whAaiHxOatgtKd+w0dOi//1KUxj3KoPINZdtDaDj3IA= go.opentelemetry.io/otel/exporters/prometheus v0.39.0/go.mod h1:4jo5Q4CROlCpSPsXLhymi+LYrDXd2ObU5wbKayfZs7Y= -go.opentelemetry.io/otel/metric v1.17.0 h1:iG6LGVz5Gh+IuO0jmgvpTB6YVrCGngi8QGm+pMd8Pdc= -go.opentelemetry.io/otel/metric v1.17.0/go.mod h1:h4skoxdZI17AxwITdmdZjjYJQH5nzijUUjm+wtPph5o= +go.opentelemetry.io/otel/metric v1.18.0 h1:JwVzw94UYmbx3ej++CwLUQZxEODDj/pOuTCvzhtRrSQ= +go.opentelemetry.io/otel/metric v1.18.0/go.mod h1:nNSpsVDjWGfb7chbRLUNW+PBNdcSTHD4Uu5pfFMOI0k= go.opentelemetry.io/otel/sdk v1.16.0 h1:Z1Ok1YsijYL0CSJpHt4cS3wDDh7p572grzNrBMiMWgE= go.opentelemetry.io/otel/sdk v1.16.0/go.mod h1:tMsIuKXuuIWPBAOrH+eHtvhTL+SntFtXF9QD68aP6p4= go.opentelemetry.io/otel/sdk/metric v0.39.0 h1:Kun8i1eYf48kHH83RucG93ffz0zGV1sh46FAScOTuDI= go.opentelemetry.io/otel/sdk/metric v0.39.0/go.mod h1:piDIRgjcK7u0HCL5pCA4e74qpK/jk3NiUoAHATVAmiI= -go.opentelemetry.io/otel/trace v1.17.0 h1:/SWhSRHmDPOImIAetP1QAeMnZYiQXrTy4fMMYOdSKWQ= -go.opentelemetry.io/otel/trace v1.17.0/go.mod h1:I/4vKTgFclIsXRVucpH25X0mpFSczM7aHeaz0ZBLWjY= +go.opentelemetry.io/otel/trace v1.18.0 h1:NY+czwbHbmndxojTEKiSMHkG2ClNH2PwmcHrdo0JY10= +go.opentelemetry.io/otel/trace v1.18.0/go.mod h1:T2+SGJGuYZY3bjj5rgh/hN7KIrlpWC5nS8Mjvzckz+0= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I= go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM= From f7fe64a8df5afd5aca46428bc55f5a3e5159bea8 Mon Sep 17 00:00:00 2001 From: Yunus M Date: Tue, 17 Oct 2023 16:54:37 +0530 Subject: [PATCH 07/23] feat: add analytics group call (#3757) * feat: add analytics group call * feat: add safety check for billing breakdown variable --- frontend/src/AppRoutes/index.tsx | 50 ++++++++++++++----- .../BillingContainer/BillingContainer.tsx | 36 ++++++------- .../IngestionSettings/IngestionSettings.tsx | 16 ++++-- frontend/src/container/SideNav/SideNav.tsx | 10 ++-- frontend/src/pages/Settings/config.ts | 3 ++ frontend/src/pages/Settings/utils.ts | 11 +++- frontend/src/pages/SignUp/SignUp.tsx | 9 +--- frontend/src/utils/app.ts | 14 ++++++ 8 files changed, 101 insertions(+), 
48 deletions(-) diff --git a/frontend/src/AppRoutes/index.tsx b/frontend/src/AppRoutes/index.tsx index a2330d1aeb..9367ed1624 100644 --- a/frontend/src/AppRoutes/index.tsx +++ b/frontend/src/AppRoutes/index.tsx @@ -22,7 +22,8 @@ import { Dispatch } from 'redux'; import { AppState } from 'store/reducers'; import AppActions from 'types/actions'; import { UPDATE_FEATURE_FLAG_RESPONSE } from 'types/actions/app'; -import AppReducer from 'types/reducer/app'; +import AppReducer, { User } from 'types/reducer/app'; +import { extractDomain, isCloudUser } from 'utils/app'; import { trackPageView } from 'utils/segmentAnalytics'; import PrivateRoute from './Private'; @@ -32,7 +33,7 @@ function App(): JSX.Element { const themeConfig = useThemeConfig(); const { data } = useLicense(); const [routes, setRoutes] = useState(defaultRoutes); - const { role, isLoggedIn: isLoggedInState, user } = useSelector< + const { role, isLoggedIn: isLoggedInState, user, org } = useSelector< AppState, AppReducer >((state) => state.app); @@ -41,6 +42,8 @@ function App(): JSX.Element { const { hostname, pathname } = window.location; + const isCloudUserVal = isCloudUser(); + const featureResponse = useGetFeatureFlag((allFlags) => { const isOnboardingEnabled = allFlags.find((flag) => flag.name === FeatureKeys.ONBOARDING)?.active || @@ -58,10 +61,7 @@ function App(): JSX.Element { }, }); - if ( - !isOnboardingEnabled || - !(hostname && hostname.endsWith('signoz.cloud')) - ) { + if (!isOnboardingEnabled || !isCloudUserVal) { const newRoutes = routes.filter( (route) => route?.path !== ROUTES.GET_STARTED, ); @@ -86,6 +86,35 @@ function App(): JSX.Element { license.isCurrent && license.planKey === LICENSE_PLAN_KEY.BASIC_PLAN, ) || data?.payload?.licenses === null; + const enableAnalytics = (user: User): void => { + const orgName = + org && Array.isArray(org) && org.length > 0 ? 
org[0].name : ''; + + const identifyPayload = { + email: user?.email, + name: user?.name, + company_name: orgName, + role, + }; + const domain = extractDomain(user?.email); + + const hostNameParts = hostname.split('.'); + + const groupTraits = { + name: orgName, + tenant_id: hostNameParts[0], + data_region: hostNameParts[1], + tenant_url: hostname, + company_domain: domain, + }; + + window.analytics.identify(user?.email, identifyPayload); + + window.analytics.group(domain, groupTraits); + + window.clarity('identify', user.email, user.name); + }; + useEffect(() => { const isIdentifiedUser = getLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER); @@ -98,12 +127,9 @@ function App(): JSX.Element { ) { setLocalStorageApi(LOCALSTORAGE.IS_IDENTIFIED_USER, 'true'); - window.analytics.identify(user?.email, { - email: user?.email, - name: user?.name, - }); - - window.clarity('identify', user.email, user.name); + if (isCloudUserVal) { + enableAnalytics(user); + } } if (isOnBasicPlan || (isLoggedInState && role && role !== 'ADMIN')) { diff --git a/frontend/src/container/BillingContainer/BillingContainer.tsx b/frontend/src/container/BillingContainer/BillingContainer.tsx index aa674e780a..f47af4ac0a 100644 --- a/frontend/src/container/BillingContainer/BillingContainer.tsx +++ b/frontend/src/container/BillingContainer/BillingContainer.tsx @@ -176,24 +176,26 @@ export default function BillingContainer(): JSX.Element { } = data?.payload || {}; const formattedUsageData: any[] = []; - for (let index = 0; index < breakdown.length; index += 1) { - const element = breakdown[index]; + if (breakdown && Array.isArray(breakdown)) { + for (let index = 0; index < breakdown.length; index += 1) { + const element = breakdown[index]; - element?.tiers.forEach( - ( - tier: { quantity: number; unitPrice: number; tierCost: number }, - i: number, - ) => { - formattedUsageData.push({ - key: `${index}${i}`, - name: i === 0 ? element?.type : '', - unit: element?.unit, - dataIngested: tier.quantity, - pricePerUnit: tier.unitPrice, - cost: `$ ${tier.tierCost}`, - }); - }, - ); + element?.tiers.forEach( + ( + tier: { quantity: number; unitPrice: number; tierCost: number }, + i: number, + ) => { + formattedUsageData.push({ + key: `${index}${i}`, + name: i === 0 ? 
element?.type : '', + unit: element?.unit, + dataIngested: tier.quantity, + pricePerUnit: tier.unitPrice, + cost: `$ ${tier.tierCost}`, + }); + }, + ); + } } setData(formattedUsageData); diff --git a/frontend/src/container/IngestionSettings/IngestionSettings.tsx b/frontend/src/container/IngestionSettings/IngestionSettings.tsx index 0971ecc960..354ce155ff 100644 --- a/frontend/src/container/IngestionSettings/IngestionSettings.tsx +++ b/frontend/src/container/IngestionSettings/IngestionSettings.tsx @@ -1,6 +1,6 @@ import './IngestionSettings.styles.scss'; -import { Table, Typography } from 'antd'; +import { Skeleton, Table, Typography } from 'antd'; import type { ColumnsType } from 'antd/es/table'; import getIngestionData from 'api/settings/getIngestionData'; import { useQuery } from 'react-query'; @@ -12,7 +12,7 @@ import AppReducer from 'types/reducer/app'; export default function IngestionSettings(): JSX.Element { const { user } = useSelector((state) => state.app); - const { data: ingestionData } = useQuery({ + const { data: ingestionData, isFetching } = useQuery({ queryFn: getIngestionData, queryKey: ['getIngestionData', user?.userId], }); @@ -25,11 +25,19 @@ export default function IngestionSettings(): JSX.Element { render: (text): JSX.Element => {text} , }, { - title: 'Value', + title: '', dataIndex: 'value', key: 'value', render: (text): JSX.Element => ( - {text} +
+ {isFetching ? ( + + ) : ( + + {text} + + )} +
), }, ]; diff --git a/frontend/src/container/SideNav/SideNav.tsx b/frontend/src/container/SideNav/SideNav.tsx index 85ca6295ee..e0d32f8606 100644 --- a/frontend/src/container/SideNav/SideNav.tsx +++ b/frontend/src/container/SideNav/SideNav.tsx @@ -13,6 +13,7 @@ import { useLocation } from 'react-router-dom'; import { sideBarCollapse } from 'store/actions/app'; import { AppState } from 'store/reducers'; import AppReducer from 'types/reducer/app'; +import { isCloudUser } from 'utils/app'; import { routeConfig, styles } from './config'; import { getQueryString } from './helper'; @@ -50,8 +51,6 @@ function SideNav(): JSX.Element { license.isCurrent && license.planKey === LICENSE_PLAN_KEY.BASIC_PLAN, ) || data?.payload?.licenses === null; - const { hostname } = window.location; - const menuItems = useMemo( () => defaultMenuItems.filter((item) => { @@ -64,16 +63,13 @@ function SideNav(): JSX.Element { return item.key !== ROUTES.BILLING; } - if ( - !isOnboardingEnabled || - !(hostname && hostname.endsWith('signoz.cloud')) - ) { + if (!isOnboardingEnabled || !isCloudUser()) { return item.key !== ROUTES.GET_STARTED; } return true; }), - [featureResponse.data, isOnBasicPlan, hostname, role], + [featureResponse.data, isOnBasicPlan, role], ); const { pathname, search } = useLocation(); diff --git a/frontend/src/pages/Settings/config.ts b/frontend/src/pages/Settings/config.ts index a24a3a5097..26fb1805fa 100644 --- a/frontend/src/pages/Settings/config.ts +++ b/frontend/src/pages/Settings/config.ts @@ -19,6 +19,9 @@ export const commonRoutes = (t: TFunction): RouteTabProps['routes'] => [ route: ROUTES.ALL_CHANNELS, key: ROUTES.ALL_CHANNELS, }, +]; + +export const ingestionSettings = (t: TFunction): RouteTabProps['routes'] => [ { Component: IngestionSettings, name: t('routes:ingestion_settings').toString(), diff --git a/frontend/src/pages/Settings/utils.ts b/frontend/src/pages/Settings/utils.ts index 8c965b0040..862ee0adb9 100644 --- a/frontend/src/pages/Settings/utils.ts +++ b/frontend/src/pages/Settings/utils.ts @@ -1,7 +1,12 @@ import { RouteTabProps } from 'components/RouteTab/types'; import { TFunction } from 'i18next'; +import { isCloudUser } from 'utils/app'; -import { commonRoutes, organizationSettings } from './config'; +import { + commonRoutes, + ingestionSettings, + organizationSettings, +} from './config'; export const getRoutes = ( isCurrentOrgSettings: boolean, @@ -13,5 +18,9 @@ export const getRoutes = ( common = [...common, ...organizationSettings(t)]; } + if (isCloudUser()) { + common = [...common, ...ingestionSettings(t)]; + } + return common; }; diff --git a/frontend/src/pages/SignUp/SignUp.tsx b/frontend/src/pages/SignUp/SignUp.tsx index 910ff91b63..4a5a3e8cc6 100644 --- a/frontend/src/pages/SignUp/SignUp.tsx +++ b/frontend/src/pages/SignUp/SignUp.tsx @@ -17,6 +17,7 @@ import { useLocation } from 'react-router-dom'; import { SuccessResponse } from 'types/api'; import { PayloadProps } from 'types/api/user/getUser'; import { PayloadProps as LoginPrecheckPayloadProps } from 'types/api/user/loginPrecheck'; +import { isCloudUser } from 'utils/app'; import { trackEvent } from 'utils/segmentAnalytics'; import { @@ -233,8 +234,6 @@ function SignUp({ version }: SignUpProps): JSX.Element { const handleSubmit = (): void => { (async (): Promise => { - const { hostname } = window.location; - try { const values = form.getFieldsValue(); setLoading(true); @@ -260,11 +259,7 @@ function SignUp({ version }: SignUpProps): JSX.Element { await commonHandler( values, async (): Promise => { - if ( - 
isOnboardingEnabled && - hostname && - hostname.endsWith('signoz.cloud') - ) { + if (isOnboardingEnabled && isCloudUser()) { history.push(ROUTES.GET_STARTED); } else { history.push(ROUTES.APPLICATION); diff --git a/frontend/src/utils/app.ts b/frontend/src/utils/app.ts index 890fd64602..b87212ea69 100644 --- a/frontend/src/utils/app.ts +++ b/frontend/src/utils/app.ts @@ -3,3 +3,17 @@ import { SKIP_ONBOARDING } from 'constants/onboarding'; export const isOnboardingSkipped = (): boolean => getLocalStorage(SKIP_ONBOARDING) === 'true'; + +export function extractDomain(email: string): string { + const emailParts = email.split('@'); + if (emailParts.length !== 2) { + return email; + } + return emailParts[1]; +} + +export const isCloudUser = (): boolean => { + const { hostname } = window.location; + + return hostname?.endsWith('signoz.cloud'); +}; From e0b83bda628d308c854a7c13b62415827f85f2df Mon Sep 17 00:00:00 2001 From: Srikanth Chekuri Date: Tue, 17 Oct 2023 17:50:54 +0000 Subject: [PATCH 08/23] feat: add created{By,At} , updated{By,At} to alerts/dashboards (#3754) --- pkg/query-service/app/auth.go | 12 +++- pkg/query-service/app/dashboards/model.go | 72 +++++++++++++++---- pkg/query-service/app/dashboards/provision.go | 5 +- pkg/query-service/app/http_handler.go | 28 ++++---- pkg/query-service/common/user.go | 16 +++++ pkg/query-service/constants/constants.go | 4 ++ pkg/query-service/rules/apiParams.go | 4 ++ pkg/query-service/rules/db.go | 59 +++++++++------ pkg/query-service/rules/manager.go | 49 +++++++------ 9 files changed, 176 insertions(+), 73 deletions(-) create mode 100644 pkg/query-service/common/user.go diff --git a/pkg/query-service/app/auth.go b/pkg/query-service/app/auth.go index dccf6dd8dd..f771a7cbfe 100644 --- a/pkg/query-service/app/auth.go +++ b/pkg/query-service/app/auth.go @@ -1,11 +1,13 @@ package app import ( + "context" "errors" "net/http" "github.com/gorilla/mux" "go.signoz.io/signoz/pkg/query-service/auth" + "go.signoz.io/signoz/pkg/query-service/constants" "go.signoz.io/signoz/pkg/query-service/model" ) @@ -39,10 +41,12 @@ func (am *AuthMiddleware) ViewAccess(f func(http.ResponseWriter, *http.Request)) if !(auth.IsViewer(user) || auth.IsEditor(user) || auth.IsAdmin(user)) { RespondError(w, &model.ApiError{ Typ: model.ErrorForbidden, - Err: errors.New("API is accessible to viewers/editors/admins."), + Err: errors.New("API is accessible to viewers/editors/admins"), }, nil) return } + ctx := context.WithValue(r.Context(), constants.ContextUserKey, user) + r = r.WithContext(ctx) f(w, r) } } @@ -64,6 +68,8 @@ func (am *AuthMiddleware) EditAccess(f func(http.ResponseWriter, *http.Request)) }, nil) return } + ctx := context.WithValue(r.Context(), constants.ContextUserKey, user) + r = r.WithContext(ctx) f(w, r) } } @@ -86,6 +92,8 @@ func (am *AuthMiddleware) SelfAccess(f func(http.ResponseWriter, *http.Request)) }, nil) return } + ctx := context.WithValue(r.Context(), constants.ContextUserKey, user) + r = r.WithContext(ctx) f(w, r) } } @@ -107,6 +115,8 @@ func (am *AuthMiddleware) AdminAccess(f func(http.ResponseWriter, *http.Request) }, nil) return } + ctx := context.WithValue(r.Context(), constants.ContextUserKey, user) + r = r.WithContext(ctx) f(w, r) } } diff --git a/pkg/query-service/app/dashboards/model.go b/pkg/query-service/app/dashboards/model.go index e97facff62..fa51864d4c 100644 --- a/pkg/query-service/app/dashboards/model.go +++ b/pkg/query-service/app/dashboards/model.go @@ -1,6 +1,7 @@ package dashboards import ( + "context" "encoding/base64" "encoding/json" 
"fmt" @@ -14,6 +15,7 @@ import ( "github.com/gosimple/slug" "github.com/jmoiron/sqlx" "github.com/mitchellh/mapstructure" + "go.signoz.io/signoz/pkg/query-service/common" "go.signoz.io/signoz/pkg/query-service/interfaces" "go.signoz.io/signoz/pkg/query-service/model" "go.uber.org/zap" @@ -95,6 +97,37 @@ func InitDB(dataSourceName string) (*sqlx.DB, error) { return nil, fmt.Errorf("error in creating ttl_status table: %s", err.Error()) } + // sqlite does not support "IF NOT EXISTS" + createdAt := `ALTER TABLE rules ADD COLUMN created_at datetime;` + _, err = db.Exec(createdAt) + if err != nil && !strings.Contains(err.Error(), "duplicate column name") { + return nil, fmt.Errorf("error in adding column created_at to rules table: %s", err.Error()) + } + + createdBy := `ALTER TABLE rules ADD COLUMN created_by TEXT;` + _, err = db.Exec(createdBy) + if err != nil && !strings.Contains(err.Error(), "duplicate column name") { + return nil, fmt.Errorf("error in adding column created_by to rules table: %s", err.Error()) + } + + updatedBy := `ALTER TABLE rules ADD COLUMN updated_by TEXT;` + _, err = db.Exec(updatedBy) + if err != nil && !strings.Contains(err.Error(), "duplicate column name") { + return nil, fmt.Errorf("error in adding column updated_by to rules table: %s", err.Error()) + } + + createdBy = `ALTER TABLE dashboards ADD COLUMN created_by TEXT;` + _, err = db.Exec(createdBy) + if err != nil && !strings.Contains(err.Error(), "duplicate column name") { + return nil, fmt.Errorf("error in adding column created_by to dashboards table: %s", err.Error()) + } + + updatedBy = `ALTER TABLE dashboards ADD COLUMN updated_by TEXT;` + _, err = db.Exec(updatedBy) + if err != nil && !strings.Contains(err.Error(), "duplicate column name") { + return nil, fmt.Errorf("error in adding column updated_by to dashboards table: %s", err.Error()) + } + return db, nil } @@ -103,7 +136,9 @@ type Dashboard struct { Uuid string `json:"uuid" db:"uuid"` Slug string `json:"-" db:"-"` CreatedAt time.Time `json:"created_at" db:"created_at"` + CreateBy *string `json:"created_by" db:"created_by"` UpdatedAt time.Time `json:"updated_at" db:"updated_at"` + UpdateBy *string `json:"updated_by" db:"updated_by"` Title string `json:"-" db:"-"` Data Data `json:"data" db:"data"` } @@ -132,16 +167,22 @@ func (c *Data) Scan(src interface{}) error { } // CreateDashboard creates a new dashboard -func CreateDashboard(data map[string]interface{}, fm interfaces.FeatureLookup) (*Dashboard, *model.ApiError) { +func CreateDashboard(ctx context.Context, data map[string]interface{}, fm interfaces.FeatureLookup) (*Dashboard, *model.ApiError) { dash := &Dashboard{ Data: data, } + var userEmail string + if user := common.GetUserFromContext(ctx); user != nil { + userEmail = user.Email + } dash.CreatedAt = time.Now() + dash.CreateBy = &userEmail dash.UpdatedAt = time.Now() + dash.UpdateBy = &userEmail dash.UpdateSlug() dash.Uuid = uuid.New().String() - map_data, err := json.Marshal(dash.Data) + mapData, err := json.Marshal(dash.Data) if err != nil { zap.S().Errorf("Error in marshalling data field in dashboard: ", dash, err) return nil, &model.ApiError{Typ: model.ErrorExec, Err: err} @@ -155,8 +196,8 @@ func CreateDashboard(data map[string]interface{}, fm interfaces.FeatureLookup) ( } } - // db.Prepare("Insert into dashboards where") - result, err := db.Exec("INSERT INTO dashboards (uuid, created_at, updated_at, data) VALUES ($1, $2, $3, $4)", dash.Uuid, dash.CreatedAt, dash.UpdatedAt, map_data) + result, err := db.Exec("INSERT INTO dashboards (uuid, 
created_at, created_by, updated_at, updated_by, data) VALUES ($1, $2, $3, $4, $5, $6)", + dash.Uuid, dash.CreatedAt, userEmail, dash.UpdatedAt, userEmail, mapData) if err != nil { zap.S().Errorf("Error in inserting dashboard data: ", dash, err) @@ -177,7 +218,7 @@ func CreateDashboard(data map[string]interface{}, fm interfaces.FeatureLookup) ( return dash, nil } -func GetDashboards() ([]Dashboard, *model.ApiError) { +func GetDashboards(ctx context.Context) ([]Dashboard, *model.ApiError) { dashboards := []Dashboard{} query := `SELECT * FROM dashboards` @@ -190,9 +231,9 @@ func GetDashboards() ([]Dashboard, *model.ApiError) { return dashboards, nil } -func DeleteDashboard(uuid string, fm interfaces.FeatureLookup) *model.ApiError { +func DeleteDashboard(ctx context.Context, uuid string, fm interfaces.FeatureLookup) *model.ApiError { - dashboard, dErr := GetDashboard(uuid) + dashboard, dErr := GetDashboard(ctx, uuid) if dErr != nil { zap.S().Errorf("Error in getting dashboard: ", uuid, dErr) return dErr @@ -222,7 +263,7 @@ func DeleteDashboard(uuid string, fm interfaces.FeatureLookup) *model.ApiError { return nil } -func GetDashboard(uuid string) (*Dashboard, *model.ApiError) { +func GetDashboard(ctx context.Context, uuid string) (*Dashboard, *model.ApiError) { dashboard := Dashboard{} query := `SELECT * FROM dashboards WHERE uuid=?` @@ -235,15 +276,15 @@ func GetDashboard(uuid string) (*Dashboard, *model.ApiError) { return &dashboard, nil } -func UpdateDashboard(uuid string, data map[string]interface{}, fm interfaces.FeatureLookup) (*Dashboard, *model.ApiError) { +func UpdateDashboard(ctx context.Context, uuid string, data map[string]interface{}, fm interfaces.FeatureLookup) (*Dashboard, *model.ApiError) { - map_data, err := json.Marshal(data) + mapData, err := json.Marshal(data) if err != nil { zap.S().Errorf("Error in marshalling data field in dashboard: ", data, err) return nil, &model.ApiError{Typ: model.ErrorBadData, Err: err} } - dashboard, apiErr := GetDashboard(uuid) + dashboard, apiErr := GetDashboard(ctx, uuid) if apiErr != nil { return nil, apiErr } @@ -265,10 +306,15 @@ func UpdateDashboard(uuid string, data map[string]interface{}, fm interfaces.Fea } dashboard.UpdatedAt = time.Now() + var userEmail string + if user := common.GetUserFromContext(ctx); user != nil { + userEmail = user.Email + } + dashboard.UpdateBy = &userEmail dashboard.Data = data - // db.Prepare("Insert into dashboards where") - _, err = db.Exec("UPDATE dashboards SET updated_at=$1, data=$2 WHERE uuid=$3 ", dashboard.UpdatedAt, map_data, dashboard.Uuid) + _, err = db.Exec("UPDATE dashboards SET updated_at=$1, updated_by=$2, data=$3 WHERE uuid=$4;", + dashboard.UpdatedAt, userEmail, mapData, dashboard.Uuid) if err != nil { zap.S().Errorf("Error in inserting dashboard data: ", data, err) diff --git a/pkg/query-service/app/dashboards/provision.go b/pkg/query-service/app/dashboards/provision.go index d8869f048b..fa2a935b58 100644 --- a/pkg/query-service/app/dashboards/provision.go +++ b/pkg/query-service/app/dashboards/provision.go @@ -1,6 +1,7 @@ package dashboards import ( + "context" "encoding/json" "io/ioutil" "os" @@ -38,13 +39,13 @@ func readCurrentDir(dir string, fm interfaces.FeatureLookup) error { continue } - _, apiErr := GetDashboard(data["uuid"].(string)) + _, apiErr := GetDashboard(context.Background(), data["uuid"].(string)) if apiErr == nil { zap.S().Infof("Creating Dashboards: Error in file: %s\t%s", filename, "Dashboard already present in database") continue } - _, apiErr = CreateDashboard(data, fm) 
+ _, apiErr = CreateDashboard(context.Background(), data, fm) if apiErr != nil { zap.S().Errorf("Creating Dashboards: Error in file: %s\t%s", filename, apiErr.Err) continue diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index 87f05ab098..eedaf0b11d 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -448,7 +448,7 @@ func Intersection(a, b []int) (c []int) { func (aH *APIHandler) getRule(w http.ResponseWriter, r *http.Request) { id := mux.Vars(r)["id"] - ruleResponse, err := aH.ruleManager.GetRule(id) + ruleResponse, err := aH.ruleManager.GetRule(r.Context(), id) if err != nil { RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil) return @@ -765,7 +765,7 @@ func (aH *APIHandler) QueryRangeMetricsV2(w http.ResponseWriter, r *http.Request func (aH *APIHandler) listRules(w http.ResponseWriter, r *http.Request) { - rules, err := aH.ruleManager.ListRuleStates() + rules, err := aH.ruleManager.ListRuleStates(r.Context()) if err != nil { RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil) return @@ -778,7 +778,7 @@ func (aH *APIHandler) listRules(w http.ResponseWriter, r *http.Request) { func (aH *APIHandler) getDashboards(w http.ResponseWriter, r *http.Request) { - allDashboards, err := dashboards.GetDashboards() + allDashboards, err := dashboards.GetDashboards(r.Context()) if err != nil { RespondError(w, err, nil) @@ -829,7 +829,7 @@ func (aH *APIHandler) getDashboards(w http.ResponseWriter, r *http.Request) { func (aH *APIHandler) deleteDashboard(w http.ResponseWriter, r *http.Request) { uuid := mux.Vars(r)["uuid"] - err := dashboards.DeleteDashboard(uuid, aH.featureFlags) + err := dashboards.DeleteDashboard(r.Context(), uuid, aH.featureFlags) if err != nil { RespondError(w, err, nil) @@ -935,7 +935,7 @@ func (aH *APIHandler) updateDashboard(w http.ResponseWriter, r *http.Request) { return } - dashboard, apiError := dashboards.UpdateDashboard(uuid, postData, aH.featureFlags) + dashboard, apiError := dashboards.UpdateDashboard(r.Context(), uuid, postData, aH.featureFlags) if apiError != nil { RespondError(w, apiError, nil) return @@ -949,7 +949,7 @@ func (aH *APIHandler) getDashboard(w http.ResponseWriter, r *http.Request) { uuid := mux.Vars(r)["uuid"] - dashboard, apiError := dashboards.GetDashboard(uuid) + dashboard, apiError := dashboards.GetDashboard(r.Context(), uuid) if apiError != nil { RespondError(w, apiError, nil) @@ -960,7 +960,7 @@ func (aH *APIHandler) getDashboard(w http.ResponseWriter, r *http.Request) { } -func (aH *APIHandler) saveAndReturn(w http.ResponseWriter, signozDashboard model.DashboardData) { +func (aH *APIHandler) saveAndReturn(w http.ResponseWriter, r *http.Request, signozDashboard model.DashboardData) { toSave := make(map[string]interface{}) toSave["title"] = signozDashboard.Title toSave["description"] = signozDashboard.Description @@ -969,7 +969,7 @@ func (aH *APIHandler) saveAndReturn(w http.ResponseWriter, signozDashboard model toSave["widgets"] = signozDashboard.Widgets toSave["variables"] = signozDashboard.Variables - dashboard, apiError := dashboards.CreateDashboard(toSave, aH.featureFlags) + dashboard, apiError := dashboards.CreateDashboard(r.Context(), toSave, aH.featureFlags) if apiError != nil { RespondError(w, apiError, nil) return @@ -988,7 +988,7 @@ func (aH *APIHandler) createDashboardsTransform(w http.ResponseWriter, r *http.R err = json.Unmarshal(b, &importData) if err == nil { signozDashboard := 
dashboards.TransformGrafanaJSONToSignoz(importData) - aH.saveAndReturn(w, signozDashboard) + aH.saveAndReturn(w, r, signozDashboard) return } RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, "Error while creating dashboard from grafana json") @@ -1010,7 +1010,7 @@ func (aH *APIHandler) createDashboards(w http.ResponseWriter, r *http.Request) { return } - dash, apiErr := dashboards.CreateDashboard(postData, aH.featureFlags) + dash, apiErr := dashboards.CreateDashboard(r.Context(), postData, aH.featureFlags) if apiErr != nil { RespondError(w, apiErr, nil) @@ -1051,7 +1051,7 @@ func (aH *APIHandler) deleteRule(w http.ResponseWriter, r *http.Request) { id := mux.Vars(r)["id"] - err := aH.ruleManager.DeleteRule(id) + err := aH.ruleManager.DeleteRule(r.Context(), id) if err != nil { RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil) @@ -1074,7 +1074,7 @@ func (aH *APIHandler) patchRule(w http.ResponseWriter, r *http.Request) { return } - gettableRule, err := aH.ruleManager.PatchRule(string(body), id) + gettableRule, err := aH.ruleManager.PatchRule(r.Context(), string(body), id) if err != nil { RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil) @@ -1095,7 +1095,7 @@ func (aH *APIHandler) editRule(w http.ResponseWriter, r *http.Request) { return } - err = aH.ruleManager.EditRule(string(body), id) + err = aH.ruleManager.EditRule(r.Context(), string(body), id) if err != nil { RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil) @@ -1248,7 +1248,7 @@ func (aH *APIHandler) createRule(w http.ResponseWriter, r *http.Request) { return } - err = aH.ruleManager.CreateRule(string(body)) + err = aH.ruleManager.CreateRule(r.Context(), string(body)) if err != nil { RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil) return diff --git a/pkg/query-service/common/user.go b/pkg/query-service/common/user.go new file mode 100644 index 0000000000..ecfc519fc0 --- /dev/null +++ b/pkg/query-service/common/user.go @@ -0,0 +1,16 @@ +package common + +import ( + "context" + + "go.signoz.io/signoz/pkg/query-service/constants" + "go.signoz.io/signoz/pkg/query-service/model" +) + +func GetUserFromContext(ctx context.Context) *model.UserPayload { + user, ok := ctx.Value(constants.ContextUserKey).(*model.UserPayload) + if !ok { + return nil + } + return user +} diff --git a/pkg/query-service/constants/constants.go b/pkg/query-service/constants/constants.go index 0f181c36fd..3027ecaee2 100644 --- a/pkg/query-service/constants/constants.go +++ b/pkg/query-service/constants/constants.go @@ -17,6 +17,10 @@ const ( OpAmpWsEndpoint = "0.0.0.0:4320" // address for opamp websocket ) +type ContextKey string + +const ContextUserKey ContextKey = "user" + var ConfigSignozIo = "https://config.signoz.io/api/v1" var DEFAULT_TELEMETRY_ANONYMOUS = false diff --git a/pkg/query-service/rules/apiParams.go b/pkg/query-service/rules/apiParams.go index f33af06e81..300eac330f 100644 --- a/pkg/query-service/rules/apiParams.go +++ b/pkg/query-service/rules/apiParams.go @@ -241,4 +241,8 @@ type GettableRule struct { Id string `json:"id"` State string `json:"state"` PostableRule + CreatedAt *time.Time `json:"createAt"` + CreatedBy *string `json:"createBy"` + UpdatedAt *time.Time `json:"updateAt"` + UpdatedBy *string `json:"updateBy"` } diff --git a/pkg/query-service/rules/db.go b/pkg/query-service/rules/db.go index 7070f23346..cc3a33f953 100644 --- a/pkg/query-service/rules/db.go +++ b/pkg/query-service/rules/db.go @@ -1,35 +1,41 @@ package rules 
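// NOTE (editor's sketch, not part of this patch): a minimal illustration of how
// the context plumbing introduced in this commit is meant to be consumed. It
// assumes the AuthMiddleware shown earlier has already stored the
// *model.UserPayload under constants.ContextUserKey; the handler name below is
// hypothetical.

import (
	"net/http"

	"go.signoz.io/signoz/pkg/query-service/common"
)

// exampleAuditedHandler shows the read side of the pattern: the middleware
// attaches the user to the request context, and downstream code recovers the
// acting user's email for the new created_by/updated_by columns.
func exampleAuditedHandler(w http.ResponseWriter, r *http.Request) {
	userEmail := "unknown"
	// GetUserFromContext returns nil when no user is attached, so guard
	// before dereferencing.
	if user := common.GetUserFromContext(r.Context()); user != nil {
		userEmail = user.Email
	}
	_, _ = w.Write([]byte("acting user: " + userEmail))
}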
import ( + "context" "fmt" - "github.com/jmoiron/sqlx" - "go.uber.org/zap" "strconv" "time" + + "github.com/jmoiron/sqlx" + "go.signoz.io/signoz/pkg/query-service/common" + "go.uber.org/zap" ) // Data store to capture user alert rule settings type RuleDB interface { // CreateRuleTx stores rule in the db and returns tx and group name (on success) - CreateRuleTx(rule string) (string, Tx, error) + CreateRuleTx(ctx context.Context, rule string) (string, Tx, error) // EditRuleTx updates the given rule in the db and returns tx and group name (on success) - EditRuleTx(rule string, id string) (string, Tx, error) + EditRuleTx(ctx context.Context, rule string, id string) (string, Tx, error) // DeleteRuleTx deletes the given rule in the db and returns tx and group name (on success) - DeleteRuleTx(id string) (string, Tx, error) + DeleteRuleTx(ctx context.Context, id string) (string, Tx, error) // GetStoredRules fetches the rule definitions from db - GetStoredRules() ([]StoredRule, error) + GetStoredRules(ctx context.Context) ([]StoredRule, error) // GetStoredRule for a given ID from DB - GetStoredRule(id string) (*StoredRule, error) + GetStoredRule(ctx context.Context, id string) (*StoredRule, error) } type StoredRule struct { - Id int `json:"id" db:"id"` - UpdatedAt time.Time `json:"updated_at" db:"updated_at"` - Data string `json:"data" db:"data"` + Id int `json:"id" db:"id"` + CreatedAt *time.Time `json:"created_at" db:"created_at"` + CreatedBy *string `json:"created_by" db:"created_by"` + UpdatedAt *time.Time `json:"updated_at" db:"updated_at"` + UpdatedBy *string `json:"updated_by" db:"updated_by"` + Data string `json:"data" db:"data"` } type Tx interface { @@ -51,17 +57,23 @@ func newRuleDB(db *sqlx.DB) RuleDB { // CreateRuleTx stores a given rule in db and returns task name, // sql tx and error (if any) -func (r *ruleDB) CreateRuleTx(rule string) (string, Tx, error) { +func (r *ruleDB) CreateRuleTx(ctx context.Context, rule string) (string, Tx, error) { var groupName string var lastInsertId int64 + var userEmail string + if user := common.GetUserFromContext(ctx); user != nil { + userEmail = user.Email + } + createdAt := time.Now() + updatedAt := time.Now() tx, err := r.Begin() if err != nil { return groupName, nil, err } - stmt, err := tx.Prepare(`INSERT into rules (updated_at, data) VALUES($1,$2);`) + stmt, err := tx.Prepare(`INSERT into rules (created_at, created_by, updated_at, updated_by, data) VALUES($1,$2,$3,$4,$5);`) if err != nil { zap.S().Errorf("Error in preparing statement for INSERT to rules\n", err) tx.Rollback() @@ -70,7 +82,7 @@ func (r *ruleDB) CreateRuleTx(rule string) (string, Tx, error) { defer stmt.Close() - result, err := stmt.Exec(time.Now(), rule) + result, err := stmt.Exec(createdAt, userEmail, updatedAt, userEmail, rule) if err != nil { zap.S().Errorf("Error in Executing prepared statement for INSERT to rules\n", err) tx.Rollback() // return an error too, we may want to wrap them @@ -87,7 +99,7 @@ func (r *ruleDB) CreateRuleTx(rule string) (string, Tx, error) { // EditRuleTx stores a given rule string in database and returns // task name, sql tx and error (if any) -func (r *ruleDB) EditRuleTx(rule string, id string) (string, Tx, error) { +func (r *ruleDB) EditRuleTx(ctx context.Context, rule string, id string) (string, Tx, error) { var groupName string idInt, _ := strconv.Atoi(id) @@ -95,6 +107,11 @@ func (r *ruleDB) EditRuleTx(rule string, id string) (string, Tx, error) { return groupName, nil, fmt.Errorf("failed to read alert id from parameters") } + var userEmail 
string + if user := common.GetUserFromContext(ctx); user != nil { + userEmail = user.Email + } + updatedAt := time.Now() groupName = prepareTaskName(int64(idInt)) // todo(amol): resolve this error - database locked when using @@ -103,7 +120,7 @@ func (r *ruleDB) EditRuleTx(rule string, id string) (string, Tx, error) { //if err != nil { // return groupName, tx, err //} - stmt, err := r.Prepare(`UPDATE rules SET updated_at=$1, data=$2 WHERE id=$3;`) + stmt, err := r.Prepare(`UPDATE rules SET updated_by=$1, updated_at=$2, data=$3 WHERE id=$4;`) if err != nil { zap.S().Errorf("Error in preparing statement for UPDATE to rules\n", err) // tx.Rollback() @@ -111,7 +128,7 @@ func (r *ruleDB) EditRuleTx(rule string, id string) (string, Tx, error) { } defer stmt.Close() - if _, err := stmt.Exec(time.Now(), rule, idInt); err != nil { + if _, err := stmt.Exec(userEmail, updatedAt, rule, idInt); err != nil { zap.S().Errorf("Error in Executing prepared statement for UPDATE to rules\n", err) // tx.Rollback() // return an error too, we may want to wrap them return groupName, nil, err @@ -121,7 +138,7 @@ func (r *ruleDB) EditRuleTx(rule string, id string) (string, Tx, error) { // DeleteRuleTx deletes a given rule with id and returns // taskname, sql tx and error (if any) -func (r *ruleDB) DeleteRuleTx(id string) (string, Tx, error) { +func (r *ruleDB) DeleteRuleTx(ctx context.Context, id string) (string, Tx, error) { idInt, _ := strconv.Atoi(id) groupName := prepareTaskName(int64(idInt)) @@ -149,11 +166,11 @@ func (r *ruleDB) DeleteRuleTx(id string) (string, Tx, error) { return groupName, nil, nil } -func (r *ruleDB) GetStoredRules() ([]StoredRule, error) { +func (r *ruleDB) GetStoredRules(ctx context.Context) ([]StoredRule, error) { rules := []StoredRule{} - query := fmt.Sprintf("SELECT id, updated_at, data FROM rules") + query := "SELECT id, created_at, created_by, updated_at, updated_by, data FROM rules" err := r.Select(&rules, query) @@ -165,7 +182,7 @@ func (r *ruleDB) GetStoredRules() ([]StoredRule, error) { return rules, nil } -func (r *ruleDB) GetStoredRule(id string) (*StoredRule, error) { +func (r *ruleDB) GetStoredRule(ctx context.Context, id string) (*StoredRule, error) { intId, err := strconv.Atoi(id) if err != nil { return nil, fmt.Errorf("invalid id parameter") @@ -173,7 +190,7 @@ func (r *ruleDB) GetStoredRule(id string) (*StoredRule, error) { rule := &StoredRule{} - query := fmt.Sprintf("SELECT id, updated_at, data FROM rules WHERE id=%d", intId) + query := fmt.Sprintf("SELECT id, created_at, created_by, updated_at, updated_by, data FROM rules WHERE id=%d", intId) err = r.Get(rule, query) // zap.S().Info(query) diff --git a/pkg/query-service/rules/manager.go b/pkg/query-service/rules/manager.go index 70596982d9..30c643b031 100644 --- a/pkg/query-service/rules/manager.go +++ b/pkg/query-service/rules/manager.go @@ -16,8 +16,9 @@ import ( "go.uber.org/zap" + "errors" + "github.com/jmoiron/sqlx" - "github.com/pkg/errors" // opentracing "github.com/opentracing/opentracing-go" am "go.signoz.io/signoz/pkg/query-service/integrations/alertManager" @@ -27,8 +28,6 @@ import ( "go.signoz.io/signoz/pkg/query-service/utils/labels" ) -// namespace for prom metrics -const namespace = "signoz" const taskNamesuffix = "webAppEditor" func ruleIdFromTaskName(n string) string { @@ -77,8 +76,6 @@ type Manager struct { // datastore to store alert definitions ruleDB RuleDB - // pause all rule tasks - pause bool logger log.Logger featureFlags interfaces.FeatureLookup @@ -142,7 +139,7 @@ func (m *Manager) Pause(b 
bool) { } func (m *Manager) initiate() error { - storedRules, err := m.ruleDB.GetStoredRules() + storedRules, err := m.ruleDB.GetStoredRules(context.Background()) if err != nil { return err } @@ -172,7 +169,7 @@ func (m *Manager) initiate() error { zap.S().Info("msg:", "migrating rule from JSON to yaml", "\t rule:", rec.Data, "\t parsed rule:", parsedRule) ruleJSON, err := json.Marshal(parsedRule) if err == nil { - taskName, _, err := m.ruleDB.EditRuleTx(string(ruleJSON), fmt.Sprintf("%d", rec.Id)) + taskName, _, err := m.ruleDB.EditRuleTx(context.Background(), string(ruleJSON), fmt.Sprintf("%d", rec.Id)) if err != nil { zap.S().Errorf("msg: failed to migrate rule ", "/t error:", err) } else { @@ -195,6 +192,10 @@ func (m *Manager) initiate() error { } } + if len(loadErrors) > 0 { + return errors.Join(loadErrors...) + } + return nil } @@ -223,11 +224,11 @@ func (m *Manager) Stop() { // EditRuleDefinition writes the rule definition to the // datastore and also updates the rule executor -func (m *Manager) EditRule(ruleStr string, id string) error { +func (m *Manager) EditRule(ctx context.Context, ruleStr string, id string) error { parsedRule, errs := ParsePostableRule([]byte(ruleStr)) - currentRule, err := m.GetRule(id) + currentRule, err := m.GetRule(ctx, id) if err != nil { zap.S().Errorf("msg: ", "failed to get the rule from rule db", "\t ruleid: ", id) return err @@ -247,7 +248,7 @@ func (m *Manager) EditRule(ruleStr string, id string) error { return errs[0] } - taskName, _, err := m.ruleDB.EditRuleTx(ruleStr, id) + taskName, _, err := m.ruleDB.EditRuleTx(ctx, ruleStr, id) if err != nil { return err } @@ -314,7 +315,7 @@ func (m *Manager) editTask(rule *PostableRule, taskName string) error { return nil } -func (m *Manager) DeleteRule(id string) error { +func (m *Manager) DeleteRule(ctx context.Context, id string) error { idInt, err := strconv.Atoi(id) if err != nil { @@ -323,7 +324,7 @@ func (m *Manager) DeleteRule(id string) error { } // update feature usage - rule, err := m.GetRule(id) + rule, err := m.GetRule(ctx, id) if err != nil { zap.S().Errorf("msg: ", "failed to get the rule from rule db", "\t ruleid: ", id) return err @@ -334,7 +335,7 @@ func (m *Manager) DeleteRule(id string) error { m.deleteTask(taskName) } - if _, _, err := m.ruleDB.DeleteRuleTx(id); err != nil { + if _, _, err := m.ruleDB.DeleteRuleTx(ctx, id); err != nil { zap.S().Errorf("msg: ", "failed to delete the rule from rule db", "\t ruleid: ", id) return err } @@ -365,7 +366,7 @@ func (m *Manager) deleteTask(taskName string) { // CreateRule stores rule def into db and also // starts an executor for the rule -func (m *Manager) CreateRule(ruleStr string) error { +func (m *Manager) CreateRule(ctx context.Context, ruleStr string) error { parsedRule, errs := ParsePostableRule([]byte(ruleStr)) // check if the rule uses any feature that is not enabled @@ -380,7 +381,7 @@ func (m *Manager) CreateRule(ruleStr string) error { return errs[0] } - taskName, tx, err := m.ruleDB.CreateRuleTx(ruleStr) + taskName, tx, err := m.ruleDB.CreateRuleTx(ctx, ruleStr) if err != nil { return err } @@ -665,10 +666,10 @@ func (m *Manager) ListActiveRules() ([]Rule, error) { return ruleList, nil } -func (m *Manager) ListRuleStates() (*GettableRules, error) { +func (m *Manager) ListRuleStates(ctx context.Context) (*GettableRules, error) { // fetch rules from DB - storedRules, err := m.ruleDB.GetStoredRules() + storedRules, err := m.ruleDB.GetStoredRules(ctx) if err != nil { return nil, err } @@ -693,14 +694,18 @@ func (m *Manager) 
ListRuleStates() (*GettableRules, error) { } else { ruleResponse.State = rm.State().String() } + ruleResponse.CreatedAt = s.CreatedAt + ruleResponse.CreatedBy = s.CreatedBy + ruleResponse.UpdatedAt = s.UpdatedAt + ruleResponse.UpdatedBy = s.UpdatedBy resp = append(resp, ruleResponse) } return &GettableRules{Rules: resp}, nil } -func (m *Manager) GetRule(id string) (*GettableRule, error) { - s, err := m.ruleDB.GetStoredRule(id) +func (m *Manager) GetRule(ctx context.Context, id string) (*GettableRule, error) { + s, err := m.ruleDB.GetStoredRule(ctx, id) if err != nil { return nil, err } @@ -746,7 +751,7 @@ func (m *Manager) syncRuleStateWithTask(taskName string, rule *PostableRule) err // - over write the patch attributes received in input (ruleStr) // - re-deploy or undeploy task as necessary // - update the patched rule in the DB -func (m *Manager) PatchRule(ruleStr string, ruleId string) (*GettableRule, error) { +func (m *Manager) PatchRule(ctx context.Context, ruleStr string, ruleId string) (*GettableRule, error) { if ruleId == "" { return nil, fmt.Errorf("id is mandatory for patching rule") @@ -755,7 +760,7 @@ func (m *Manager) PatchRule(ruleStr string, ruleId string) (*GettableRule, error taskName := prepareTaskName(ruleId) // retrieve rule from DB - storedJSON, err := m.ruleDB.GetStoredRule(ruleId) + storedJSON, err := m.ruleDB.GetStoredRule(ctx, ruleId) if err != nil { zap.S().Errorf("msg:", "failed to get stored rule with given id", "\t error:", err) return nil, err @@ -789,7 +794,7 @@ func (m *Manager) PatchRule(ruleStr string, ruleId string) (*GettableRule, error } // write updated rule to db - if _, _, err = m.ruleDB.EditRuleTx(string(patchedRuleBytes), ruleId); err != nil { + if _, _, err = m.ruleDB.EditRuleTx(ctx, string(patchedRuleBytes), ruleId); err != nil { // write failed, rollback task state // restore task state from the stored rule From 5c5ee2cc70a0c7ad589494fa9edc0fb44ace9bdf Mon Sep 17 00:00:00 2001 From: Srikanth Chekuri Date: Wed, 18 Oct 2023 09:37:57 +0000 Subject: [PATCH 09/23] fix: nil pointer dereference in query builder expression (#3764) --- pkg/query-service/app/queryBuilder/query_builder.go | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pkg/query-service/app/queryBuilder/query_builder.go b/pkg/query-service/app/queryBuilder/query_builder.go index 51d2fce667..dedcff1f10 100644 --- a/pkg/query-service/app/queryBuilder/query_builder.go +++ b/pkg/query-service/app/queryBuilder/query_builder.go @@ -247,7 +247,11 @@ func (qb *QueryBuilder) PrepareQueries(params *v3.QueryRangeParamsV3, args ...in // Build queries for each expression for _, query := range compositeQuery.BuilderQueries { if query.Expression != query.QueryName { - expression, _ := govaluate.NewEvaluableExpressionWithFunctions(query.Expression, EvalFuncs) + expression, err := govaluate.NewEvaluableExpressionWithFunctions(query.Expression, EvalFuncs) + + if err != nil { + return nil, err + } queryString, err := expressionToQuery(params, queries, expression, query.QueryName) if err != nil { From 53dee57e17acb428857da06c3d3969392f836347 Mon Sep 17 00:00:00 2001 From: Yunus M Date: Wed, 18 Oct 2023 20:05:41 +0530 Subject: [PATCH 10/23] Update CODEOWNERS (#3766) --- .github/CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 356dbaed07..e851767f8f 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -3,7 +3,7 @@ # that they own. 
* @ankitnayan -/frontend/ @palashgdev +/frontend/ @palashgdev @YounixM /deploy/ @prashant-shahi /sample-apps/ @prashant-shahi **/query-service/ @srikanthccv From 6e20fbb174f08fd4f132d4d22d88f4a411c8d931 Mon Sep 17 00:00:00 2001 From: Yunus M Date: Wed, 18 Oct 2023 21:43:46 +0530 Subject: [PATCH 11/23] fix: update version check login and other minor UI fixes (#3759) * fix: update version check login and other minor UI fixes * fix: update text in billing page * fix: remove useEffect and replace with onSuccess and fix remaining days bug --- .../BillingContainer/BillingContainer.tsx | 81 ++++++++----------- frontend/src/container/Header/index.tsx | 11 +-- frontend/src/container/SideNav/SideNav.tsx | 18 +---- frontend/src/container/Version/index.tsx | 4 +- .../pages/WorkspaceLocked/WorkspaceLocked.tsx | 6 +- frontend/src/utils/app.ts | 8 ++ frontend/src/utils/timeUtils.ts | 13 +++ 7 files changed, 71 insertions(+), 70 deletions(-) diff --git a/frontend/src/container/BillingContainer/BillingContainer.tsx b/frontend/src/container/BillingContainer/BillingContainer.tsx index f47af4ac0a..f5a35bbeae 100644 --- a/frontend/src/container/BillingContainer/BillingContainer.tsx +++ b/frontend/src/container/BillingContainer/BillingContainer.tsx @@ -17,6 +17,7 @@ import { useSelector } from 'react-redux'; import { AppState } from 'store/reducers'; import { License } from 'types/api/licenses/def'; import AppReducer from 'types/reducer/app'; +import { getFormattedDate } from 'utils/timeUtils'; interface DataType { key: string; @@ -107,19 +108,6 @@ export const getRemainingDays = (billingEndDate: number): number => { return Math.ceil(timeDifference / (1000 * 60 * 60 * 24)); }; -export const getFormattedDate = (date?: number): string => { - if (!date) { - return new Date().toLocaleDateString(); - } - const trialEndDate = new Date(date * 1000); - - const options = { day: 'numeric', month: 'short', year: 'numeric' }; - - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - return trialEndDate.toLocaleDateString(undefined, options); -}; - export default function BillingContainer(): JSX.Element { const daysRemainingStr = 'days remaining in your billing period.'; const [headerText, setHeaderText] = useState(''); @@ -138,35 +126,6 @@ export default function BillingContainer(): JSX.Element { const handleError = useAxiosError(); - const { isLoading, data: usageData } = useQuery( - [REACT_QUERY_KEY.GET_BILLING_USAGE, user?.userId], - { - queryFn: () => getUsage(activeLicense?.key || ''), - onError: handleError, - enabled: activeLicense !== null, - }, - ); - - useEffect(() => { - const activeValidLicense = - licensesData?.payload?.licenses?.find( - (license) => license.isCurrent === true, - ) || null; - - setActiveLicense(activeValidLicense); - - if (!isFetching && licensesData?.payload?.onTrial && !licenseError) { - setIsFreeTrial(true); - setBillAmount(0); - setDaysRemaining(getRemainingDays(licensesData?.payload?.trialEnd)); - setHeaderText( - `You are in free trial period. 
Your free trial will end on ${getFormattedDate( - licensesData?.payload?.trialEnd, - )}`, - ); - } - }, [isFetching, licensesData?.payload, licenseError]); - const processUsageData = useCallback( (data: any): void => { const { @@ -202,23 +161,51 @@ export default function BillingContainer(): JSX.Element { setTotalBillAmount(total); if (!licensesData?.payload?.onTrial) { + const remainingDays = getRemainingDays(billingPeriodEnd) - 1; + setHeaderText( `Your current billing period is from ${getFormattedDate( billingPeriodStart, )} to ${getFormattedDate(billingPeriodEnd)}`, ); - setDaysRemaining(getRemainingDays(billingPeriodEnd) - 1); + setDaysRemaining(remainingDays > 0 ? remainingDays : 0); setBillAmount(billTotal); } }, [licensesData?.payload?.onTrial], ); + const { isLoading } = useQuery( + [REACT_QUERY_KEY.GET_BILLING_USAGE, user?.userId], + { + queryFn: () => getUsage(activeLicense?.key || ''), + onError: handleError, + enabled: activeLicense !== null, + onSuccess: processUsageData, + }, + ); + useEffect(() => { - if (!isLoading && usageData) { - processUsageData(usageData); + const activeValidLicense = + licensesData?.payload?.licenses?.find( + (license) => license.isCurrent === true, + ) || null; + + setActiveLicense(activeValidLicense); + + if (!isFetching && licensesData?.payload?.onTrial && !licenseError) { + const remainingDays = getRemainingDays(licensesData?.payload?.trialEnd); + + setIsFreeTrial(true); + setBillAmount(0); + setDaysRemaining(remainingDays > 0 ? remainingDays : 0); + setHeaderText( + `You are in free trial period. Your free trial will end on ${getFormattedDate( + licensesData?.payload?.trialEnd, + )}`, + ); } - }, [isLoading, processUsageData, usageData]); + }, [isFetching, licensesData?.payload, licenseError]); const columns: ColumnsType = [ { @@ -402,7 +389,7 @@ export default function BillingContainer(): JSX.Element { - You will be charged only when trial period ends + Your billing will start only after the trial period diff --git a/frontend/src/container/Header/index.tsx b/frontend/src/container/Header/index.tsx index d2463a5e76..e3a97f2d8f 100644 --- a/frontend/src/container/Header/index.tsx +++ b/frontend/src/container/Header/index.tsx @@ -8,10 +8,7 @@ import { import { Button, Divider, MenuProps, Space, Typography } from 'antd'; import { Logout } from 'api/utils'; import ROUTES from 'constants/routes'; -import { - getFormattedDate, - getRemainingDays, -} from 'container/BillingContainer/BillingContainer'; +import { getRemainingDays } from 'container/BillingContainer/BillingContainer'; import Config from 'container/ConfigDropdown'; import { useIsDarkMode, useThemeMode } from 'hooks/useDarkMode'; import useLicense, { LICENSE_PLAN_STATUS } from 'hooks/useLicense'; @@ -29,6 +26,7 @@ import { useSelector } from 'react-redux'; import { NavLink } from 'react-router-dom'; import { AppState } from 'store/reducers'; import AppReducer from 'types/reducer/app'; +import { getFormattedDate } from 'utils/timeUtils'; import CurrentOrganization from './CurrentOrganization'; import ManageLicense from './ManageLicense'; @@ -139,9 +137,12 @@ function HeaderContainer(): JSX.Element { {showTrialExpiryBanner && (
You are in free trial period. Your free trial will end on{' '} - {getFormattedDate(licenseData?.payload?.trialEnd)}. + + {getFormattedDate(licenseData?.payload?.trialEnd || Date.now())}. + {role === 'ADMIN' ? ( + {' '} Please{' '}
- - + {!hideBreadcrumbs && ( + + + + )} {!isRouteToSkip && ( diff --git a/frontend/src/pages/Support/Support.styles.scss b/frontend/src/pages/Support/Support.styles.scss new file mode 100644 index 0000000000..e298f74d8a --- /dev/null +++ b/frontend/src/pages/Support/Support.styles.scss @@ -0,0 +1,53 @@ +.support-page-container { + color: white; + padding-left: 48px; + padding-right: 48px; + + max-width: 1400px; + margin: 0 auto; +} + +.support-channels { + margin: 48px 0; + + display: flex; + flex-wrap: wrap; + gap: 16px; +} + +.support-channel { + flex: 0 0 calc(33.333% - 32px); + min-height: 200px; + position: relative; + + .support-channel-title { + width: 100%; + display: flex; + align-items: center; + gap: 8px; + margin-top: 0px; + } + + .support-channel-action { + position: absolute; + bottom: 24px; + left: 24px; + width: calc(100% - 48px); + + button { + max-width: 100%; + } + } +} + +@media screen and (max-width: 1440px) { + .support-channel { + min-height: 240px; + } +} + +@media screen and (min-width: 1440px) { + .support-page-container { + width: 80%; + } +} diff --git a/frontend/src/pages/Support/Support.tsx b/frontend/src/pages/Support/Support.tsx new file mode 100644 index 0000000000..61d6c11c38 --- /dev/null +++ b/frontend/src/pages/Support/Support.tsx @@ -0,0 +1,169 @@ +import './Support.styles.scss'; + +import { Button, Card, Typography } from 'antd'; +import { + Book, + Cable, + Calendar, + Github, + MessageSquare, + Slack, +} from 'lucide-react'; + +const { Title, Text } = Typography; + +interface Channel { + key: any; + name?: string; + icon?: JSX.Element; + title?: string; + url: any; + btnText?: string; +} + +const channelsMap = { + documentation: 'documentation', + github: 'github', + slack_community: 'slack_community', + chat: 'chat', + schedule_call: 'schedule_call', + slack_connect: 'slack_connect', +}; + +const supportChannels = [ + { + key: 'documentation', + name: 'Documentation', + icon: , + title: 'Find answers in the documentation.', + url: 'https://signoz.io/docs/', + btnText: 'Visit docs', + }, + { + key: 'github', + name: 'Github', + icon: , + title: 'Create an issue on GitHub to report bugs or request new features.', + url: 'https://github.com/SigNoz/signoz/issues', + btnText: 'Create issue', + }, + { + key: 'slack_community', + name: 'Slack Community', + icon: , + title: 'Get support from the SigNoz community on Slack.', + url: 'https://signoz.io/slack', + btnText: 'Join Slack', + }, + { + key: 'chat', + name: 'Chat', + icon: , + title: 'Get quick support directly from the team.', + url: '', + btnText: 'Launch chat', + }, + { + key: 'schedule_call', + name: 'Schedule a call', + icon: , + title: 'Schedule a call with the founders.', + url: 'https://calendly.com/pranay-signoz/signoz-intro-calls', + btnText: 'Schedule call', + }, + { + key: 'slack_connect', + name: 'Slack Connect', + icon: , + title: 'Get a dedicated support channel for your team.', + url: '', + btnText: 'Request Slack connect', + }, +]; + +export default function Support(): JSX.Element { + const handleChannelWithRedirects = (url: string): void => { + window.open(url, '_blank'); + }; + + const handleSlackConnectRequest = (): void => { + const recipient = 'support@signoz.io'; + const subject = 'Slack Connect Request'; + const body = `I'd like to request a dedicated Slack Connect channel for me and my team. 
Users (emails) to include besides mine:`; + + // Create the mailto link + const mailtoLink = `mailto:${recipient}?subject=${encodeURIComponent( + subject, + )}&body=${encodeURIComponent(body)}`; + + // Open the default email client + window.location.href = mailtoLink; + }; + + const handleChat = (): void => { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + if (window.Intercom) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + window.Intercom('show'); + } + }; + + const handleChannelClick = (channel: Channel): void => { + switch (channel.key) { + case channelsMap.documentation: + case channelsMap.github: + case channelsMap.slack_community: + case channelsMap.schedule_call: + handleChannelWithRedirects(channel.url); + break; + case channelsMap.chat: + handleChat(); + break; + case channelsMap.slack_connect: + handleSlackConnectRequest(); + break; + default: + handleChannelWithRedirects('https://signoz.io/slack'); + break; + } + }; + + return ( +
+
+ Support + + We are here to help in case of questions or issues. Pick the channel that + is most convenient for you. + +
+ +
+ {supportChannels.map( + (channel): JSX.Element => ( + +
+ + {channel.icon} + {channel.name}{' '} + + {channel.title} +
+ +
+ +
+
+ ), + )} +
+
+ ); +} diff --git a/frontend/src/pages/Support/index.tsx b/frontend/src/pages/Support/index.tsx new file mode 100644 index 0000000000..e16e7fedc9 --- /dev/null +++ b/frontend/src/pages/Support/index.tsx @@ -0,0 +1,3 @@ +import Support from './Support'; + +export default Support; diff --git a/frontend/src/utils/app.ts b/frontend/src/utils/app.ts index 55312a11dc..0ab9e6fca7 100644 --- a/frontend/src/utils/app.ts +++ b/frontend/src/utils/app.ts @@ -18,6 +18,12 @@ export const isCloudUser = (): boolean => { return hostname?.endsWith('signoz.cloud'); }; +export const isEECloudUser = (): boolean => { + const { hostname } = window.location; + + return hostname?.endsWith('signoz.io'); +}; + export const checkVersionState = ( currentVersion: string, latestVersion: string, diff --git a/frontend/src/utils/permission/index.ts b/frontend/src/utils/permission/index.ts index 1ca3064720..b4de0e3110 100644 --- a/frontend/src/utils/permission/index.ts +++ b/frontend/src/utils/permission/index.ts @@ -81,5 +81,6 @@ export const routePermission: Record = { GET_STARTED: ['ADMIN', 'EDITOR', 'VIEWER'], WORKSPACE_LOCKED: ['ADMIN', 'EDITOR', 'VIEWER'], BILLING: ['ADMIN', 'EDITOR', 'VIEWER'], + SUPPORT: ['ADMIN', 'EDITOR', 'VIEWER'], SOMETHING_WENT_WRONG: ['ADMIN', 'EDITOR', 'VIEWER'], }; diff --git a/frontend/yarn.lock b/frontend/yarn.lock index b8b7025757..bb32618a25 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -9882,6 +9882,11 @@ lru-cache@^6.0.0: dependencies: yallist "^4.0.0" +lucide-react@0.288.0: + version "0.288.0" + resolved "https://registry.yarnpkg.com/lucide-react/-/lucide-react-0.288.0.tgz#cc9fda209fe4ec6e572efca38f7d3e3cde7422eb" + integrity sha512-ikhb/9LOkq9orPoLV9lLC4UYyoXQycBhIgH7H59ahOkk0mkcAqkD52m84RXedE/qVqZHW8rEJquInT4xGmsNqw== + lz-string@^1.4.4: version "1.5.0" resolved "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz" From e3d08a4275ee1c0c5561db86123098cda7ec1920 Mon Sep 17 00:00:00 2001 From: Srikanth Chekuri Date: Thu, 19 Oct 2023 08:30:34 +0000 Subject: [PATCH 13/23] chore: send to customer portal for manage (#3763) --- ee/query-service/app/api/api.go | 1 + ee/query-service/app/api/license.go | 34 ++++++++- frontend/src/api/billing/manage.ts | 30 ++++++++ .../BillingContainer/BillingContainer.tsx | 72 ++++++++++++++----- 4 files changed, 118 insertions(+), 19 deletions(-) create mode 100644 frontend/src/api/billing/manage.ts diff --git a/ee/query-service/app/api/api.go b/ee/query-service/app/api/api.go index 1c0171efac..a17eb7b79a 100644 --- a/ee/query-service/app/api/api.go +++ b/ee/query-service/app/api/api.go @@ -158,6 +158,7 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *baseapp.AuthMiddlew router.HandleFunc("/api/v1/checkout", am.AdminAccess(ah.checkout)).Methods(http.MethodPost) router.HandleFunc("/api/v1/billing", am.AdminAccess(ah.getBilling)).Methods(http.MethodGet) + router.HandleFunc("/api/v1/portal", am.AdminAccess(ah.portalSession)).Methods(http.MethodPost) router.HandleFunc("/api/v2/licenses", am.ViewAccess(ah.listLicensesV2)). 
diff --git a/ee/query-service/app/api/license.go b/ee/query-service/app/api/license.go index c125fd10d1..66d108d468 100644 --- a/ee/query-service/app/api/license.go +++ b/ee/query-service/app/api/license.go @@ -14,7 +14,7 @@ import ( type tierBreakdown struct { UnitPrice float64 `json:"unitPrice"` - Quantity int64 `json:"quantity"` + Quantity float64 `json:"quantity"` TierStart int64 `json:"tierStart"` TierEnd int64 `json:"tierEnd"` TierCost float64 `json:"tierCost"` @@ -224,3 +224,35 @@ func (ah *APIHandler) listLicensesV2(w http.ResponseWriter, r *http.Request) { ah.Respond(w, resp) } + +func (ah *APIHandler) portalSession(w http.ResponseWriter, r *http.Request) { + + type checkoutResponse struct { + Status string `json:"status"` + Data struct { + RedirectURL string `json:"redirectURL"` + } `json:"data"` + } + + hClient := &http.Client{} + req, err := http.NewRequest("POST", constants.LicenseSignozIo+"/portal", r.Body) + if err != nil { + RespondError(w, model.InternalError(err), nil) + return + } + req.Header.Add("X-SigNoz-SecretKey", constants.LicenseAPIKey) + licenseResp, err := hClient.Do(req) + if err != nil { + RespondError(w, model.InternalError(err), nil) + return + } + + // decode response body + var resp checkoutResponse + if err := json.NewDecoder(licenseResp.Body).Decode(&resp); err != nil { + RespondError(w, model.InternalError(err), nil) + return + } + + ah.Respond(w, resp.Data) +} diff --git a/frontend/src/api/billing/manage.ts b/frontend/src/api/billing/manage.ts new file mode 100644 index 0000000000..dca561bdba --- /dev/null +++ b/frontend/src/api/billing/manage.ts @@ -0,0 +1,30 @@ +import axios from 'api'; +import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { AxiosError } from 'axios'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { + CheckoutRequestPayloadProps, + CheckoutSuccessPayloadProps, +} from 'types/api/billing/checkout'; + +const manageCreditCardApi = async ( + props: CheckoutRequestPayloadProps, +): Promise | ErrorResponse> => { + try { + const response = await axios.post('/portal', { + licenseKey: props.licenseKey, + returnURL: props.successURL, + }); + + return { + statusCode: 200, + error: null, + message: response.data.status, + payload: response.data.data, + }; + } catch (error) { + return ErrorResponseHandler(error as AxiosError); + } +}; + +export default manageCreditCardApi; diff --git a/frontend/src/container/BillingContainer/BillingContainer.tsx b/frontend/src/container/BillingContainer/BillingContainer.tsx index f5a35bbeae..3d41317c7c 100644 --- a/frontend/src/container/BillingContainer/BillingContainer.tsx +++ b/frontend/src/container/BillingContainer/BillingContainer.tsx @@ -6,6 +6,7 @@ import { Button, Col, Row, Skeleton, Table, Tag, Typography } from 'antd'; import { ColumnsType } from 'antd/es/table'; import updateCreditCardApi from 'api/billing/checkout'; import getUsage from 'api/billing/getUsage'; +import manageCreditCardApi from 'api/billing/manage'; import { SOMETHING_WENT_WRONG } from 'constants/api'; import { REACT_QUERY_KEY } from 'constants/reactQueryKeys'; import useAxiosError from 'hooks/useAxiosError'; @@ -15,6 +16,8 @@ import { useCallback, useEffect, useState } from 'react'; import { useMutation, useQuery } from 'react-query'; import { useSelector } from 'react-redux'; import { AppState } from 'store/reducers'; +import { ErrorResponse, SuccessResponse } from 'types/api'; +import { CheckoutSuccessPayloadProps } from 'types/api/billing/checkout'; import { License } from 
'types/api/licenses/def'; import AppReducer from 'types/reducer/app'; import { getFormattedDate } from 'utils/timeUtils'; @@ -271,32 +274,65 @@ export default function BillingContainer(): JSX.Element { /> ); + const handleBillingOnSuccess = ( + data: ErrorResponse | SuccessResponse, + ): void => { + if (data?.payload?.redirectURL) { + const newTab = document.createElement('a'); + newTab.href = data.payload.redirectURL; + newTab.target = '_blank'; + newTab.rel = 'noopener noreferrer'; + newTab.click(); + } + }; + + const handleBillingOnError = (): void => { + notifications.error({ + message: SOMETHING_WENT_WRONG, + }); + }; + const { mutate: updateCreditCard, isLoading: isLoadingBilling } = useMutation( updateCreditCardApi, { onSuccess: (data) => { - if (data.payload?.redirectURL) { - const newTab = document.createElement('a'); - newTab.href = data.payload.redirectURL; - newTab.target = '_blank'; - newTab.rel = 'noopener noreferrer'; - newTab.click(); - } + handleBillingOnSuccess(data); }, - onError: () => - notifications.error({ - message: SOMETHING_WENT_WRONG, - }), + onError: handleBillingOnError, }, ); + const { + mutate: manageCreditCard, + isLoading: isLoadingManageBilling, + } = useMutation(manageCreditCardApi, { + onSuccess: (data) => { + handleBillingOnSuccess(data); + }, + onError: handleBillingOnError, + }); + const handleBilling = useCallback(async () => { - updateCreditCard({ - licenseKey: activeLicense?.key || '', - successURL: window.location.href, - cancelURL: window.location.href, - }); - }, [activeLicense?.key, updateCreditCard]); + if (isFreeTrial && !licensesData?.payload?.trialConvertedToSubscription) { + updateCreditCard({ + licenseKey: activeLicense?.key || '', + successURL: window.location.href, + cancelURL: window.location.href, + }); + } else { + manageCreditCard({ + licenseKey: activeLicense?.key || '', + successURL: window.location.href, + cancelURL: window.location.href, + }); + } + }, [ + activeLicense?.key, + isFreeTrial, + licensesData?.payload?.trialConvertedToSubscription, + manageCreditCard, + updateCreditCard, + ]); return (
@@ -330,7 +366,7 @@ export default function BillingContainer(): JSX.Element {
+ Actions +
Date: Thu, 19 Oct 2023 15:37:06 +0530 Subject: [PATCH 17/23] [Refactor]: Jest setup for wrapping Provider and mocking Query Ranges (#3705) * refactor: setup wrapper for all the providers * refactor: done with unit test configuration and service layer testing * refactor: checking for multiple services * refactor: updated test cases added table sort * refactor: moved hooks mocking to test-utils * refactor: added the search test case * refactor: updated the handler with mocks data --- frontend/jest.setup.ts | 9 + frontend/package.json | 1 + .../ServiceMetrics/ServiceMetrics.test.tsx | 50 +++ .../ServiceTraces/ServicTraces.test.tsx | 59 +++ .../ServiceTraces/Service.test.tsx | 29 +- .../container/ServiceTable/Service.test.tsx | 29 +- .../mocks-server/__mockdata__/query_range.ts | 79 ++++ .../src/mocks-server/__mockdata__/services.ts | 22 ++ .../__mockdata__/top_level_operations.ts | 5 + frontend/src/mocks-server/handlers.ts | 73 ++++ frontend/src/mocks-server/server.ts | 7 + frontend/src/mocks-server/setupTests.ts | 10 + frontend/src/pages/Services/Metrics.test.tsx | 73 ++++ frontend/src/tests/test-utils.tsx | 65 ++++ frontend/yarn.lock | 353 +++++++++++++++++- 15 files changed, 801 insertions(+), 63 deletions(-) create mode 100644 frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetrics.test.tsx create mode 100644 frontend/src/container/ServiceApplication/ServiceTraces/ServicTraces.test.tsx create mode 100644 frontend/src/mocks-server/__mockdata__/query_range.ts create mode 100644 frontend/src/mocks-server/__mockdata__/services.ts create mode 100644 frontend/src/mocks-server/__mockdata__/top_level_operations.ts create mode 100644 frontend/src/mocks-server/handlers.ts create mode 100644 frontend/src/mocks-server/server.ts create mode 100644 frontend/src/mocks-server/setupTests.ts create mode 100644 frontend/src/pages/Services/Metrics.test.tsx create mode 100644 frontend/src/tests/test-utils.tsx diff --git a/frontend/jest.setup.ts b/frontend/jest.setup.ts index c9441402d9..debe6128e2 100644 --- a/frontend/jest.setup.ts +++ b/frontend/jest.setup.ts @@ -8,6 +8,9 @@ import '@testing-library/jest-dom'; import 'jest-styled-components'; +import { server } from './src/mocks-server/server'; +// Establish API mocking before all tests. 
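// NOTE (editor's sketch, not part of this patch): the `server` imported above
// comes from the newly created src/mocks-server/server.ts, whose body is not
// shown in this excerpt. With msw 1.3.2 the handlers/server pair usually looks
// like the following; the handler here mirrors the top_level_operations
// override used in ServiceMetrics.test.tsx and is illustrative, not the
// repository's actual handler list.
import { rest } from 'msw';
import { setupServer } from 'msw/node';

// handlers.ts: one resolver per API route exercised by the tests.
const handlers = [
	rest.post(
		'http://localhost/api/v1/service/top_level_operations',
		(req, res, ctx) => res(ctx.status(200), ctx.json({ TestService: ['GET'] })),
	),
];

// server.ts: a single shared node server that this setup file starts before
// all tests, resets between tests, and closes afterwards.
export const server = setupServer(...handlers);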
+ // Mock window.matchMedia window.matchMedia = window.matchMedia || @@ -18,3 +21,9 @@ window.matchMedia = removeListener: function () {}, }; }; + +beforeAll(() => server.listen()); + +afterEach(() => server.resetHandlers()); + +afterAll(() => server.close()); diff --git a/frontend/package.json b/frontend/package.json index fb1096565e..62b8f976e9 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -185,6 +185,7 @@ "jest-playwright-preset": "^1.7.2", "jest-styled-components": "^7.0.8", "lint-staged": "^12.5.0", + "msw": "1.3.2", "portfinder-sync": "^0.0.2", "prettier": "2.2.1", "raw-loader": "4.0.2", diff --git a/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetrics.test.tsx b/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetrics.test.tsx new file mode 100644 index 0000000000..1047b16ca7 --- /dev/null +++ b/frontend/src/container/ServiceApplication/ServiceMetrics/ServiceMetrics.test.tsx @@ -0,0 +1,50 @@ +import { server } from 'mocks-server/server'; +import { rest } from 'msw'; +import { act, render, screen } from 'tests/test-utils'; + +import ServicesUsingMetrics from './index'; + +describe('ServicesUsingMetrics', () => { + test('should render the ServicesUsingMetrics component', async () => { + await act(() => { + render(<ServicesUsingMetrics />); + }); + const applicationHeader = await screen.findByText(/application/i); + expect(applicationHeader).toBeInTheDocument(); + const p99LatencyHeader = await screen.findByText(/p99 latency \(in ns\)/i); + expect(p99LatencyHeader).toBeInTheDocument(); + const errorRateHeader = await screen.findByText(/error rate \(% of total\)/i); + expect(errorRateHeader).toBeInTheDocument(); + }); + + test('should render the ServicesUsingMetrics component with loading', async () => { + await act(() => { + render(<ServicesUsingMetrics />); + }); + const loadingText = await screen.findByText(/Testapp/i); + expect(loadingText).toBeInTheDocument(); + }); + + test('should not render if the data is not present', async () => { + server.use( + rest.post( + 'http://localhost/api/v1/service/top_level_operations', + (req, res, ctx) => + res( + ctx.status(200), + ctx.json({ + SampleApp: ['GET'], + TestApp: ['GET'], + }), + ), + ), + ); + render(<ServicesUsingMetrics />); + const loading = screen.getByText(/Loading.../i); + expect(loading).toBeInTheDocument(); + const sampleAppText = await screen.findByText(/SampleApp/i); + expect(sampleAppText).toBeInTheDocument(); + const testAppText = await screen.findByText(/TestApp/i); + expect(testAppText).toBeInTheDocument(); + }); +}); diff --git a/frontend/src/container/ServiceApplication/ServiceTraces/ServicTraces.test.tsx b/frontend/src/container/ServiceApplication/ServiceTraces/ServicTraces.test.tsx new file mode 100644 index 0000000000..fc6e97e4b4 --- /dev/null +++ b/frontend/src/container/ServiceApplication/ServiceTraces/ServicTraces.test.tsx @@ -0,0 +1,59 @@ +import { act, fireEvent, render, screen } from 'tests/test-utils'; + +import ServiceTraces from '.'; + +describe('ServicesTraces', () => { + test('Should render the component', async () => { + await act(() => { + render(<ServiceTraces />); + }); + const applicationHeader = screen.getByText(/application/i); + expect(applicationHeader).toBeInTheDocument(); + const p99LatencyHeader = screen.getByText(/p99 latency \(in ms\)/i); + expect(p99LatencyHeader).toBeInTheDocument(); + const errorRateHeader = screen.getByText(/error rate \(% of total\)/i); + expect(errorRateHeader).toBeInTheDocument(); + }); + + test('Should render the Services with Services', async () => { + act(() => { + render(<ServiceTraces />); + }); + const 
serviceName = await screen.findByText(/TestService/i, { + exact: true, + }); + expect(serviceName).toBeInTheDocument(); + const p99Latency = await screen.findByText(/8\.11/i); + expect(p99Latency).toBeInTheDocument(); + }); + + test('Should click on p99 latency and sort the table', async () => { + act(() => { + render(<ServiceTraces />); + }); + const p99LatencyHeader = await screen.findByText(/p99 latency \(in ms\)/i); + expect(p99LatencyHeader).toBeInTheDocument(); + const firstServiceName = await screen.findByText(/TestService/i); + expect(firstServiceName).toBeInTheDocument(); + const secondServiceName = await screen.findByText(/TestCustomerService/i); + expect(secondServiceName).toBeInTheDocument(); + const allRow = screen.getAllByRole('row'); + expect(allRow).toHaveLength(3); + expect(allRow[1].innerHTML).toContain('TestService'); + expect(allRow[2].innerHTML).toContain('TestCustomerService'); + + const tableHeader = await screen.findAllByRole('columnheader'); + expect(tableHeader).toHaveLength(4); + + fireEvent.click(tableHeader[1]); + + const allSortedRowAsc = screen.getAllByRole('row'); + expect(allSortedRowAsc).toHaveLength(3); + expect(allSortedRowAsc[1].innerHTML).toContain('TestService'); + + fireEvent.click(tableHeader[1]); + const allSortedRowDsc = screen.getAllByRole('row'); + expect(allSortedRowDsc).toHaveLength(3); + expect(allSortedRowDsc[1].innerHTML).toContain('TestCustomerService'); + }); +}); diff --git a/frontend/src/container/ServiceApplication/ServiceTraces/Service.test.tsx b/frontend/src/container/ServiceApplication/ServiceTraces/Service.test.tsx index 2e94296d8d..c6b263a9bb 100644 --- a/frontend/src/container/ServiceApplication/ServiceTraces/Service.test.tsx +++ b/frontend/src/container/ServiceApplication/ServiceTraces/Service.test.tsx @@ -1,24 +1,11 @@ -import { render, screen, waitFor } from '@testing-library/react'; -import ROUTES from 'constants/routes'; -import { BrowserRouter } from 'react-router-dom'; +import { render, screen, waitFor } from 'tests/test-utils'; import { services } from './__mocks__/getServices'; import ServiceTraceTable from './ServiceTracesTable'; -jest.mock('react-router-dom', () => ({ - ...jest.requireActual('react-router-dom'), - useLocation: (): { pathname: string } => ({ - pathname: `${process.env.FRONTEND_API_ENDPOINT}${ROUTES.APPLICATION}/`, - }), -})); - describe('Metrics Component', () => { it('renders without errors', async () => { - render( - - - , - ); + render(); await waitFor(() => { expect(screen.getByText(/application/i)).toBeInTheDocument(); @@ -29,21 +16,13 @@ describe('Metrics Component', () => { }); it('renders if the data is loaded in the table', async () => { - render( - - - , - ); + render(); expect(screen.getByText('frontend')).toBeInTheDocument(); }); it('renders no data when required conditions are met', async () => { - render( - - - , - ); + render(); expect(screen.getByText('No data')).toBeInTheDocument(); }); diff --git a/frontend/src/container/ServiceTable/Service.test.tsx b/frontend/src/container/ServiceTable/Service.test.tsx index 4fc9231a78..b26758de68 100644 --- a/frontend/src/container/ServiceTable/Service.test.tsx +++ b/frontend/src/container/ServiceTable/Service.test.tsx @@ -1,24 +1,11 @@ -import { render, screen, waitFor } from '@testing-library/react'; -import ROUTES from 'constants/routes'; -import { BrowserRouter } from 'react-router-dom'; +import { render, screen, waitFor } from 'tests/test-utils'; import { Services } from './__mock__/servicesListMock'; import Metrics from './index'; 
-jest.mock('react-router-dom', () => ({ - ...jest.requireActual('react-router-dom'), - useLocation: (): { pathname: string } => ({ - pathname: `${process.env.FRONTEND_API_ENDPOINT}/${ROUTES.APPLICATION}/`, - }), -})); - describe('Metrics Component', () => { it('renders without errors', async () => { - render( - - - , - ); + render(); await waitFor(() => { expect(screen.getByText(/application/i)).toBeInTheDocument(); @@ -29,21 +16,13 @@ describe('Metrics Component', () => { }); it('renders if the data is loaded in the table', async () => { - render( - - - , - ); + render(); expect(screen.getByText('frontend')).toBeInTheDocument(); }); it('renders no data when required conditions are met', async () => { - render( - - - , - ); + render(); expect(screen.getByText('No data')).toBeInTheDocument(); }); diff --git a/frontend/src/mocks-server/__mockdata__/query_range.ts b/frontend/src/mocks-server/__mockdata__/query_range.ts new file mode 100644 index 0000000000..69ff7bfb66 --- /dev/null +++ b/frontend/src/mocks-server/__mockdata__/query_range.ts @@ -0,0 +1,79 @@ +import { PANEL_TYPES } from 'constants/queryBuilder'; +import { QueryRangePayload } from 'types/api/metrics/getQueryRange'; +import { EQueryType } from 'types/common/dashboard'; + +export const queryRangeSuccessResponse: QueryRangePayload = { + status: 'success', + data: { + resultType: '', + result: [ + { + status: 'success', + data: { + resultType: '', + result: [ + { + queryName: 'D', + series: [ + { + labels: { + service_name: 'Test', + }, + labelsArray: [ + { + service_name: 'Test', + }, + ], + values: [ + { + timestamp: 1696917600000, + value: '0', + }, + ], + }, + ], + list: null, + }, + { + queryName: 'F1', + series: null, + list: null, + }, + { + queryName: 'A', + series: [ + { + labels: { + service_name: 'Test', + }, + labelsArray: [ + { + service_name: 'Test', + }, + ], + values: [ + { + timestamp: 1696917600000, + value: 'NaN', + }, + ], + }, + ], + list: null, + }, + ], + }, + }, + ], + }, + compositeQuery: { + builderQueries: undefined, + chQueries: undefined, + promQueries: undefined, + queryType: EQueryType.QUERY_BUILDER, + panelType: PANEL_TYPES.TIME_SERIES, + }, + end: 0, + start: 0, + step: 0, +}; diff --git a/frontend/src/mocks-server/__mockdata__/services.ts b/frontend/src/mocks-server/__mockdata__/services.ts new file mode 100644 index 0000000000..410db3b00d --- /dev/null +++ b/frontend/src/mocks-server/__mockdata__/services.ts @@ -0,0 +1,22 @@ +import { ServicesList } from 'types/api/metrics/getService'; + +export const serviceSuccessResponse: ServicesList[] = [ + { + serviceName: 'TestService', + p99: 8106824, + avgDuration: 2772433.33333335, + numCalls: 1, + callRate: 0.000004960563520015874, + numErrors: 0, + errorRate: 0, + }, + { + serviceName: 'TestCustomerService', + p99: 9106824, + avgDuration: 4772433.333333335, + numCalls: 2, + callRate: 0.000004960563520015874, + numErrors: 0, + errorRate: 0, + }, +]; diff --git a/frontend/src/mocks-server/__mockdata__/top_level_operations.ts b/frontend/src/mocks-server/__mockdata__/top_level_operations.ts new file mode 100644 index 0000000000..e903b39066 --- /dev/null +++ b/frontend/src/mocks-server/__mockdata__/top_level_operations.ts @@ -0,0 +1,5 @@ +import { ServiceDataProps } from 'api/metrics/getTopLevelOperations'; + +export const topLevelOperationSuccessResponse: ServiceDataProps = { + TestApp: ['GET', 'GET frontpage'], +}; diff --git a/frontend/src/mocks-server/handlers.ts b/frontend/src/mocks-server/handlers.ts new file mode 100644 index 0000000000..77089498b5 
--- /dev/null +++ b/frontend/src/mocks-server/handlers.ts @@ -0,0 +1,73 @@ +import { rest } from 'msw'; + +import { queryRangeSuccessResponse } from './__mockdata__/query_range'; +import { serviceSuccessResponse } from './__mockdata__/services'; +import { topLevelOperationSuccessResponse } from './__mockdata__/top_level_operations'; + +export const handlers = [ + rest.post('http://localhost/api/v3/query_range', (req, res, ctx) => + res(ctx.status(200), ctx.json(queryRangeSuccessResponse)), + ), + + rest.post('http://localhost/api/v1/services', (req, res, ctx) => + res(ctx.status(200), ctx.json(serviceSuccessResponse)), + ), + + rest.post( + 'http://localhost/api/v1/service/top_level_operations', + (req, res, ctx) => + res(ctx.status(200), ctx.json(topLevelOperationSuccessResponse)), + ), + + rest.get( + 'http://localhost/api/v2/metrics/autocomplete/tagKey', + (req, res, ctx) => { + const metricName = req.url.searchParams.get('metricName'); + const match = req.url.searchParams.get('match'); + + if (metricName === 'signoz_calls_total' && match === 'resource_') { + return res( + ctx.status(200), + ctx.json({ status: 'success', data: ['resource_signoz_collector_id'] }), + ); + } + + return res(ctx.status(500)); + }, + ), + + rest.get( + 'http://localhost/api/v2/metrics/autocomplete/tagValue', + (req, res, ctx) => { + // ?metricName=signoz_calls_total&tagKey=resource_signoz_collector_id + const metricName = req.url.searchParams.get('metricName'); + const tagKey = req.url.searchParams.get('tagKey'); + + if ( + metricName === 'signoz_calls_total' && + tagKey === 'resource_signoz_collector_id' + ) { + return res( + ctx.status(200), + ctx.json({ + status: 'success', + data: [ + 'f38916c2-daf2-4424-bd3e-4907a7e537b6', + '6d4af7f0-4884-4a37-abd4-6bdbee29fa04', + '523c44b9-5fe1-46f7-9163-4d2c57ece09b', + 'aa52e8e8-6f88-4056-8fbd-b377394d022c', + '4d515ba2-065d-4856-b2d8-ddb957c44ddb', + 'fd47a544-1410-4c76-a554-90ef6464da02', + 'bb455f71-3fe1-4761-bbf5-efe2faee18a6', + '48563680-314e-4117-8a6d-1f0389c95e04', + '6e866423-7704-4d72-be8b-4695bc36f145', + 'e4886c76-93f5-430f-9076-eef85524312f', + ], + }), + ); + } + + return res(ctx.status(500)); + }, + ), +]; diff --git a/frontend/src/mocks-server/server.ts b/frontend/src/mocks-server/server.ts new file mode 100644 index 0000000000..096e00d323 --- /dev/null +++ b/frontend/src/mocks-server/server.ts @@ -0,0 +1,7 @@ +// src/mocks/server.js +import { setupServer } from 'msw/node'; + +import { handlers } from './handlers'; + +// This configures a request mocking server with the given request handlers. +export const server = setupServer(...handlers); diff --git a/frontend/src/mocks-server/setupTests.ts b/frontend/src/mocks-server/setupTests.ts new file mode 100644 index 0000000000..96e882c491 --- /dev/null +++ b/frontend/src/mocks-server/setupTests.ts @@ -0,0 +1,10 @@ +import { server } from './server'; +// Establish API mocking before all tests. +beforeAll(() => server.listen()); + +// Reset any request handlers that we may add during the tests, +// so they don't affect other tests. +afterEach(() => server.resetHandlers()); + +// Clean up after the tests are finished. 
+afterAll(() => server.close()); diff --git a/frontend/src/pages/Services/Metrics.test.tsx b/frontend/src/pages/Services/Metrics.test.tsx new file mode 100644 index 0000000000..960bd490b7 --- /dev/null +++ b/frontend/src/pages/Services/Metrics.test.tsx @@ -0,0 +1,73 @@ +import user from '@testing-library/user-event'; +import { render, screen } from 'tests/test-utils'; + +import Metrics from '.'; + +describe('Services', () => { + test('Should render the component', () => { + render(<Metrics />); + + const inputBox = screen.getByRole('combobox'); + expect(inputBox).toBeInTheDocument(); + + const application = screen.getByRole('columnheader', { + name: /application search/i, + }); + expect(application).toBeInTheDocument(); + + const p99 = screen.getByRole('columnheader', { + name: /p99 latency \(in ms\)/i, + }); + expect(p99).toBeInTheDocument(); + + const errorRate = screen.getByRole('columnheader', { + name: /error rate \(% of total\)/i, + }); + expect(errorRate).toBeInTheDocument(); + + const operationPerSecond = screen.getByRole('columnheader', { + name: /operations per second/i, + }); + expect(operationPerSecond).toBeInTheDocument(); + }); + + test('Should filter the table input according to input typed value', async () => { + user.setup(); + render(<Metrics />); + const inputBox = screen.getByRole('combobox'); + expect(inputBox).toBeInTheDocument(); + + await user.click(inputBox); + + const signozCollectorId = await screen.findAllByText(/signoz.collector.id/i); + expect(signozCollectorId[0]).toBeInTheDocument(); + + await user.click(signozCollectorId[1]); + + await user.click(inputBox); + // await user.click(inputBox); + + const inOperator = await screen.findAllByText(/not in/i); + expect(inOperator[1]).toBeInTheDocument(); + + await user.click(inOperator[1]); + + await user.type(inputBox, '6d'); + + const serviceId = await screen.findAllByText( + /6d4af7f0-4884-4a37-abd4-6bdbee29fa04/i, + ); + + expect(serviceId[1]).toBeInTheDocument(); + + await user.click(serviceId[1]); + + const application = await screen.findByText(/application/i); + expect(application).toBeInTheDocument(); + + await user.click(application); + + const testService = await screen.findByText(/testservice/i); + expect(testService).toBeInTheDocument(); + }, 30000); +}); diff --git a/frontend/src/tests/test-utils.tsx b/frontend/src/tests/test-utils.tsx new file mode 100644 index 0000000000..c6d838a93d --- /dev/null +++ b/frontend/src/tests/test-utils.tsx @@ -0,0 +1,65 @@ +import { render, RenderOptions, RenderResult } from '@testing-library/react'; +import ROUTES from 'constants/routes'; +import { ResourceProvider } from 'hooks/useResourceAttribute'; +import React, { ReactElement } from 'react'; +import { QueryClient, QueryClientProvider } from 'react-query'; +import { Provider } from 'react-redux'; +import { BrowserRouter } from 'react-router-dom'; +import store from 'store'; + +const queryClient = new QueryClient({ + defaultOptions: { + queries: { + refetchOnWindowFocus: false, + }, + }, +}); + +afterEach(() => { + queryClient.clear(); +}); + +jest.mock('react-i18next', () => ({ + useTranslation: (): { + t: (str: string) => string; + i18n: { + changeLanguage: () => Promise; + }; + } => ({ + t: (str: string): string => str, + i18n: { + changeLanguage: (): Promise => new Promise(() => {}), + }, + }), +})); + +jest.mock('react-router-dom', () => ({ + ...jest.requireActual('react-router-dom'), + useLocation: (): { pathname: string } => ({ + pathname: `${process.env.FRONTEND_API_ENDPOINT}/${ROUTES.TRACES_EXPLORER}/`, + }), +})); + +function 
AllTheProviders({ + children, +}: { + children: React.ReactNode; +}): ReactElement { + return ( + + + + {children} + + + + ); +} + +const customRender = ( + ui: ReactElement, + options?: Omit, +): RenderResult => render(ui, { wrapper: AllTheProviders, ...options }); + +export * from '@testing-library/react'; +export { customRender as render }; diff --git a/frontend/yarn.lock b/frontend/yarn.lock index bb32618a25..6e00178f3d 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -2823,6 +2823,28 @@ dependencies: "@monaco-editor/loader" "^1.3.3" +"@mswjs/cookies@^0.2.2": + version "0.2.2" + resolved "https://registry.yarnpkg.com/@mswjs/cookies/-/cookies-0.2.2.tgz#b4e207bf6989e5d5427539c2443380a33ebb922b" + integrity sha512-mlN83YSrcFgk7Dm1Mys40DLssI1KdJji2CMKN8eOlBqsTADYzj2+jWzsANsUTFbxDMWPD5e9bfA1RGqBpS3O1g== + dependencies: + "@types/set-cookie-parser" "^2.4.0" + set-cookie-parser "^2.4.6" + +"@mswjs/interceptors@^0.17.10": + version "0.17.10" + resolved "https://registry.yarnpkg.com/@mswjs/interceptors/-/interceptors-0.17.10.tgz#857b41f30e2b92345ed9a4e2b1d0a08b8b6fcad4" + integrity sha512-N8x7eSLGcmUFNWZRxT1vsHvypzIRgQYdG0rJey/rZCy6zT/30qDt8Joj7FxzGNLSwXbeZqJOMqDurp7ra4hgbw== + dependencies: + "@open-draft/until" "^1.0.3" + "@types/debug" "^4.1.7" + "@xmldom/xmldom" "^0.8.3" + debug "^4.3.3" + headers-polyfill "3.2.5" + outvariant "^1.2.1" + strict-event-emitter "^0.2.4" + web-encoding "^1.1.5" + "@nodelib/fs.scandir@2.1.5": version "2.1.5" resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz" @@ -2844,6 +2866,11 @@ "@nodelib/fs.scandir" "2.1.5" fastq "^1.6.0" +"@open-draft/until@^1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@open-draft/until/-/until-1.0.3.tgz#db9cc719191a62e7d9200f6e7bab21c5b848adca" + integrity sha512-Aq58f5HiWdyDlFffbbSjAlv596h/cOnt2DO1w3DOC7OJ5EHs0hd/nycJfiu9RJbT6Yk6F1knnRRXNSpxoIVZ9Q== + "@petamoriken/float16@^3.4.7": version "3.8.0" resolved "https://registry.npmjs.org/@petamoriken/float16/-/float16-3.8.0.tgz" @@ -3135,6 +3162,11 @@ dependencies: "@types/node" "*" +"@types/cookie@^0.4.1": + version "0.4.1" + resolved "https://registry.yarnpkg.com/@types/cookie/-/cookie-0.4.1.tgz#bfd02c1f2224567676c1545199f87c3a861d878d" + integrity sha512-XW/Aa8APYr6jSVVA1y/DEIZX0/GMKLEVekNG727R8cs56ahETkRAy/3DR7+fJyh7oUgGwNQaRfXCun0+KbWY7Q== + "@types/copy-webpack-plugin@^8.0.1": version "8.0.1" resolved "https://registry.npmjs.org/@types/copy-webpack-plugin/-/copy-webpack-plugin-8.0.1.tgz" @@ -3163,6 +3195,13 @@ dependencies: "@types/ms" "*" +"@types/debug@^4.1.7": + version "4.1.9" + resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.9.tgz#906996938bc672aaf2fb8c0d3733ae1dda05b005" + integrity sha512-8Hz50m2eoS56ldRlepxSBa6PWEVCtzUo/92HgLc2qTMnotJNIm7xP+UZhyWoYsyOdd5dxZ+NZLb24rsKyFs2ow== + dependencies: + "@types/ms" "*" + "@types/dompurify@^2.4.0": version "2.4.0" resolved "https://registry.npmjs.org/@types/dompurify/-/dompurify-2.4.0.tgz" @@ -3312,6 +3351,11 @@ resolved "https://registry.npmjs.org/@types/js-cookie/-/js-cookie-2.2.7.tgz" integrity sha512-aLkWa0C0vO5b4Sr798E26QgOkss68Un0bLjs7u9qxzPT5CG+8DuNTffWES58YzJs3hrVAOs1wonycqEBqNJubA== +"@types/js-levenshtein@^1.1.1": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@types/js-levenshtein/-/js-levenshtein-1.1.1.tgz#ba05426a43f9e4e30b631941e0aa17bf0c890ed5" + integrity sha512-qC4bCqYGy1y/NP7dDVr7KJarn+PbX1nSpwA7JXdu0HxT3QYjO8MJ+cntENtHFVy2dRAyBV23OZ6MxsW1AM1L8g== + "@types/json-schema@*", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.7", 
"@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": version "7.0.11" resolved "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz" @@ -3567,6 +3611,13 @@ "@types/mime" "*" "@types/node" "*" +"@types/set-cookie-parser@^2.4.0": + version "2.4.4" + resolved "https://registry.yarnpkg.com/@types/set-cookie-parser/-/set-cookie-parser-2.4.4.tgz#3c36c9147960cca0fc7c508aacb18ea41f6b5003" + integrity sha512-xCfTC/eL/GmvMC24b42qJpYSTdCIBwWcfskDF80ztXtnU6pKXyvuZP2EConb2K9ps0s7gMhCa0P1foy7wiItMA== + dependencies: + "@types/node" "*" + "@types/sockjs@^0.3.33": version "0.3.33" resolved "https://registry.npmjs.org/@types/sockjs/-/sockjs-0.3.33.tgz" @@ -3960,6 +4011,11 @@ resolved "https://registry.npmjs.org/@webpack-cli/serve/-/serve-1.7.0.tgz" integrity sha512-oxnCNGj88fL+xzV+dacXs44HcDwf1ovs3AuEzvP7mqXw7fQntqIhQ1BRmynh4qEKQSSSRSWVyXRjmTbZIX9V2Q== +"@xmldom/xmldom@^0.8.3": + version "0.8.10" + resolved "https://registry.yarnpkg.com/@xmldom/xmldom/-/xmldom-0.8.10.tgz#a1337ca426aa61cef9fe15b5b28e340a72f6fa99" + integrity sha512-2WALfTl4xo2SkGCYRt6rDTFfk9R1czmBvUQy12gK2KuRKIpWEhcbbzy8EZXtz/jkRqHX8bFEc6FC1HjX4TUWYw== + "@xobotyi/scrollbar-width@^1.9.5": version "1.9.5" resolved "https://registry.npmjs.org/@xobotyi/scrollbar-width/-/scrollbar-width-1.9.5.tgz" @@ -3983,6 +4039,11 @@ resolved "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz" integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== +"@zxing/text-encoding@0.9.0": + version "0.9.0" + resolved "https://registry.yarnpkg.com/@zxing/text-encoding/-/text-encoding-0.9.0.tgz#fb50ffabc6c7c66a0c96b4c03e3d9be74864b70b" + integrity sha512-U/4aVJ2mxI0aDNI8Uq0wEhMgY+u4CNtEb0om3+y3+niDAsoTCOB33UF0sxpzqzdqXLqmvc+vZyAt4O8pPdfkwA== + JSONStream@^1.0.4: version "1.3.5" resolved "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz" @@ -4978,6 +5039,15 @@ binary-extensions@^2.0.0: resolved "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz" integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== +bl@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" + integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== + dependencies: + buffer "^5.5.0" + inherits "^2.0.4" + readable-stream "^3.4.0" + body-parser@1.20.1: version "1.20.1" resolved "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz" @@ -5094,6 +5164,14 @@ buffer-to-arraybuffer@^0.0.5: resolved "https://registry.npmjs.org/buffer-to-arraybuffer/-/buffer-to-arraybuffer-0.0.5.tgz" integrity sha512-3dthu5CYiVB1DEJp61FtApNnNndTckcqe4pFcLdvHtrpG+kcyekCJKg4MRiDcFW7A6AODnXB9U4dwQiCW5kzJQ== +buffer@^5.5.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" + integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== + dependencies: + base64-js "^1.3.1" + ieee754 "^1.1.13" + buffer@^6.0.3: version "6.0.3" resolved "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz" @@ -5225,7 +5303,7 @@ chalk@^3.0.0: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^4.0.0, chalk@^4.1.0: +chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.1: version "4.1.2" resolved "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== @@ 
-5273,6 +5351,11 @@ character-reference-invalid@^2.0.0: resolved "https://registry.yarnpkg.com/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz#85c66b041e43b47210faf401278abf808ac45cb9" integrity sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw== +chardet@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" + integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA== + chart.js@3.9.1: version "3.9.1" resolved "https://registry.npmjs.org/chart.js/-/chart.js-3.9.1.tgz" @@ -5288,7 +5371,7 @@ chartjs-plugin-annotation@^1.4.0: resolved "https://registry.npmjs.org/chartjs-plugin-annotation/-/chartjs-plugin-annotation-1.4.0.tgz" integrity sha512-OC0eGoVvdxTtGGi8mV3Dr+G1YmMhtYYQWqGMb2uWcgcnyiBslaRKPofKwAYWPbh7ABnmQNsNDQLIKPH+XiaZLA== -"chokidar@>=3.0.0 <4.0.0", chokidar@^3.5.3: +"chokidar@>=3.0.0 <4.0.0", chokidar@^3.4.2, chokidar@^3.5.3: version "3.5.3" resolved "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz" integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== @@ -5342,6 +5425,11 @@ cli-cursor@^3.1.0: dependencies: restore-cursor "^3.1.0" +cli-spinners@^2.5.0: + version "2.9.1" + resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.9.1.tgz#9c0b9dad69a6d47cbb4333c14319b060ed395a35" + integrity sha512-jHgecW0pxkonBJdrKsqxgRX9AcG+u/5k0Q7WPDfi8AogLAdwxEkyYYNWwZ5GvVFoFx2uiY1eNcSK00fh+1+FyQ== + cli-truncate@^2.1.0: version "2.1.0" resolved "https://registry.npmjs.org/cli-truncate/-/cli-truncate-2.1.0.tgz" @@ -5358,6 +5446,11 @@ cli-truncate@^3.1.0: slice-ansi "^5.0.0" string-width "^5.0.0" +cli-width@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6" + integrity sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw== + cliui@^6.0.0: version "6.0.0" resolved "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz" @@ -5394,6 +5487,11 @@ clone-deep@^4.0.1: kind-of "^6.0.2" shallow-clone "^3.0.0" +clone@^1.0.2: + version "1.0.4" + resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" + integrity sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg== + clsx@^1.1.1: version "1.2.1" resolved "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz" @@ -5655,6 +5753,11 @@ cookie@0.5.0: resolved "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz" integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== +cookie@^0.4.2: + version "0.4.2" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.2.tgz#0e41f24de5ecf317947c82fc789e06a884824432" + integrity sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA== + copy-anything@^2.0.1: version "2.0.6" resolved "https://registry.npmjs.org/copy-anything/-/copy-anything-2.0.6.tgz" @@ -6166,7 +6269,7 @@ dayjs@^1.10.7, dayjs@^1.11.1: resolved "https://registry.npmjs.org/dayjs/-/dayjs-1.11.7.tgz" integrity sha512-+Yw9U6YO5TQohxLcIkrXBeY73WP3ejHWVvx8XCk3gxvQDCTEmS48ZrSZCKciI7Bhl/uCMyxYtE9UqRILmFphkQ== -debug@2.6.9, debug@4, debug@4.3.4, debug@^3.2.6, debug@^3.2.7, debug@^4.0.0, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4, debug@ngokevin/debug#noTimestamp: 
+debug@2.6.9, debug@4, debug@4.3.4, debug@^3.2.6, debug@^3.2.7, debug@^4.0.0, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3, debug@^4.3.4, debug@ngokevin/debug#noTimestamp: version "4.3.4" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== @@ -6270,6 +6373,13 @@ default-require-extensions@^3.0.0: dependencies: strip-bom "^4.0.0" +defaults@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.4.tgz#b0b02062c1e2aa62ff5d9528f0f98baa90978d7a" + integrity sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A== + dependencies: + clone "^1.0.2" + define-lazy-prop@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz" @@ -7173,7 +7283,7 @@ eventemitter3@^4.0.0: resolved "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== -events@^3.2.0: +events@^3.2.0, events@^3.3.0: version "3.3.0" resolved "https://registry.npmjs.org/events/-/events-3.3.0.tgz" integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== @@ -7273,6 +7383,15 @@ extend@^3.0.0: resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== +external-editor@^3.0.3: + version "3.1.0" + resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-3.1.0.tgz#cb03f740befae03ea4d283caed2741a83f335495" + integrity sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew== + dependencies: + chardet "^0.7.0" + iconv-lite "^0.4.24" + tmp "^0.0.33" + fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: version "3.1.3" resolved "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz" @@ -7357,6 +7476,13 @@ fb-watchman@^2.0.0: dependencies: bser "2.1.1" +figures@^3.0.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af" + integrity sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg== + dependencies: + escape-string-regexp "^1.0.5" + file-entry-cache@^6.0.1: version "6.0.1" resolved "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz" @@ -7791,6 +7917,11 @@ graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz" integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== +graphql@^16.8.1: + version "16.8.1" + resolved "https://registry.yarnpkg.com/graphql/-/graphql-16.8.1.tgz#1930a965bef1170603702acdb68aedd3f3cf6f07" + integrity sha512-59LZHPdGZVh695Ud9lRzPBVTtlX9ZCV150Er2W43ro37wVof0ctenSaskPPjN7lVTIN8mSZt8PHUNKZuNQUuxw== + gzip-size@^6.0.0: version "6.0.0" resolved "https://registry.npmjs.org/gzip-size/-/gzip-size-6.0.0.tgz" @@ -8034,6 +8165,11 @@ he@^1.2.0: resolved "https://registry.npmjs.org/he/-/he-1.2.0.tgz" integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== +headers-polyfill@3.2.5: + version "3.2.5" + resolved 
"https://registry.yarnpkg.com/headers-polyfill/-/headers-polyfill-3.2.5.tgz#6e67d392c9d113d37448fe45014e0afdd168faed" + integrity sha512-tUCGvt191vNSQgttSyJoibR+VO+I6+iCHIUdhzEMJKE+EAL8BwCN7fUOZlY4ofOelNHsK+gEjxB/B+9N3EWtdA== + highlight.js@^10.4.1, highlight.js@~10.7.0: version "10.7.3" resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-10.7.3.tgz#697272e3991356e40c3cac566a74eef681756531" @@ -8254,7 +8390,7 @@ i18next@^21.6.12: dependencies: "@babel/runtime" "^7.17.2" -iconv-lite@0.4.24: +iconv-lite@0.4.24, iconv-lite@^0.4.24: version "0.4.24" resolved "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== @@ -8273,7 +8409,7 @@ icss-utils@^5.0.0, icss-utils@^5.1.0: resolved "https://registry.npmjs.org/icss-utils/-/icss-utils-5.1.0.tgz" integrity sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA== -ieee754@^1.1.12, ieee754@^1.2.1: +ieee754@^1.1.12, ieee754@^1.1.13, ieee754@^1.2.1: version "1.2.1" resolved "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz" integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== @@ -8342,7 +8478,7 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.3: +inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3: version "2.0.4" resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== @@ -8370,6 +8506,27 @@ inline-style-prefixer@^6.0.0: css-in-js-utils "^3.1.0" fast-loops "^1.1.3" +inquirer@^8.2.0: + version "8.2.6" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-8.2.6.tgz#733b74888195d8d400a67ac332011b5fae5ea562" + integrity sha512-M1WuAmb7pn9zdFRtQYk26ZBoY043Sse0wVDdk4Bppr+JOXyQYybdtvK+l9wUibhtjdjvtoiNy8tk+EgsYIUqKg== + dependencies: + ansi-escapes "^4.2.1" + chalk "^4.1.1" + cli-cursor "^3.1.0" + cli-width "^3.0.0" + external-editor "^3.0.3" + figures "^3.0.0" + lodash "^4.17.21" + mute-stream "0.0.8" + ora "^5.4.1" + run-async "^2.4.0" + rxjs "^7.5.5" + string-width "^4.1.0" + strip-ansi "^6.0.0" + through "^2.3.6" + wrap-ansi "^6.0.1" + internal-slot@^1.0.3, internal-slot@^1.0.4, internal-slot@^1.0.5: version "1.0.5" resolved "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz" @@ -8432,7 +8589,7 @@ is-alphanumerical@^2.0.0: is-alphabetical "^2.0.0" is-decimal "^2.0.0" -is-arguments@^1.1.1: +is-arguments@^1.0.4, is-arguments@^1.1.1: version "1.1.1" resolved "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz" integrity sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA== @@ -8564,6 +8721,13 @@ is-generator-fn@^2.0.0: resolved "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz" integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== +is-generator-function@^1.0.7: + version "1.0.10" + resolved "https://registry.yarnpkg.com/is-generator-function/-/is-generator-function-1.0.10.tgz#f1558baf1ac17e0deea7c0415c438351ff2b3c72" + integrity sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A== + dependencies: + has-tostringtag "^1.0.0" + is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: version "4.0.3" resolved 
"https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz" @@ -8581,6 +8745,11 @@ is-hexadecimal@^2.0.0: resolved "https://registry.yarnpkg.com/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz#86b5bf668fca307498d319dfc03289d781a90027" integrity sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg== +is-interactive@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-1.0.0.tgz#cea6e6ae5c870a7b0a0004070b7b587e0252912e" + integrity sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w== + is-map@^2.0.1, is-map@^2.0.2: version "2.0.2" resolved "https://registry.npmjs.org/is-map/-/is-map-2.0.2.tgz" @@ -8591,6 +8760,11 @@ is-negative-zero@^2.0.2: resolved "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz" integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== +is-node-process@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/is-node-process/-/is-node-process-1.2.0.tgz#ea02a1b90ddb3934a19aea414e88edef7e11d134" + integrity sha512-Vg4o6/fqPxIjtxgUH5QLJhwZ7gW5diGCVlXpuUfELC62CuxM1iHcRe51f2W1FDy04Ai4KJkagKjx3XaqyfRKXw== + is-number-object@^1.0.4: version "1.0.7" resolved "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz" @@ -8704,11 +8878,23 @@ is-typed-array@^1.1.10, is-typed-array@^1.1.9: gopd "^1.0.1" has-tostringtag "^1.0.0" +is-typed-array@^1.1.3: + version "1.1.12" + resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.12.tgz#d0bab5686ef4a76f7a73097b95470ab199c57d4a" + integrity sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg== + dependencies: + which-typed-array "^1.1.11" + is-typedarray@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== +is-unicode-supported@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz#3f26c76a809593b52bfa2ecb5710ed2779b522a7" + integrity sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw== + is-weakmap@^2.0.1: version "2.0.1" resolved "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz" @@ -9411,6 +9597,11 @@ js-cookie@^2.2.1: resolved "https://registry.npmjs.org/js-cookie/-/js-cookie-2.2.1.tgz" integrity sha512-HvdH2LzI/EAZcUwA8+0nKNtWHqS+ZmijLA30RwZA0bo7ToCckjK5MkGhjED9KoRcXO6BaGI3I9UIzSA1FKFPOQ== +js-levenshtein@^1.1.6: + version "1.1.6" + resolved "https://registry.yarnpkg.com/js-levenshtein/-/js-levenshtein-1.1.6.tgz#c6cee58eb3550372df8deb85fad5ce66ce01d59d" + integrity sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g== + js-sha3@0.8.0: version "0.8.0" resolved "https://registry.npmjs.org/js-sha3/-/js-sha3-0.8.0.tgz" @@ -9826,6 +10017,14 @@ lodash@4.17.21, lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17. 
resolved "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== +log-symbols@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.1.0.tgz#3fbdbb95b4683ac9fc785111e792e558d4abd503" + integrity sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg== + dependencies: + chalk "^4.1.0" + is-unicode-supported "^0.1.0" + log-update@^4.0.0: version "4.0.0" resolved "https://registry.npmjs.org/log-update/-/log-update-4.0.0.tgz" @@ -10715,6 +10914,31 @@ ms@2.1.3: resolved "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== +msw@1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/msw/-/msw-1.3.2.tgz#35e0271293e893fc3c55116e90aad5d955c66899" + integrity sha512-wKLhFPR+NitYTkQl5047pia0reNGgf0P6a1eTnA5aNlripmiz0sabMvvHcicE8kQ3/gZcI0YiPFWmYfowfm3lA== + dependencies: + "@mswjs/cookies" "^0.2.2" + "@mswjs/interceptors" "^0.17.10" + "@open-draft/until" "^1.0.3" + "@types/cookie" "^0.4.1" + "@types/js-levenshtein" "^1.1.1" + chalk "^4.1.1" + chokidar "^3.4.2" + cookie "^0.4.2" + graphql "^16.8.1" + headers-polyfill "3.2.5" + inquirer "^8.2.0" + is-node-process "^1.2.0" + js-levenshtein "^1.1.6" + node-fetch "^2.6.7" + outvariant "^1.4.0" + path-to-regexp "^6.2.0" + strict-event-emitter "^0.4.3" + type-fest "^2.19.0" + yargs "^17.3.1" + multicast-dns@^7.2.5: version "7.2.5" resolved "https://registry.npmjs.org/multicast-dns/-/multicast-dns-7.2.5.tgz" @@ -10723,6 +10947,11 @@ multicast-dns@^7.2.5: dns-packet "^5.2.2" thunky "^1.0.2" +mute-stream@0.0.8: + version "0.0.8" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d" + integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== + nano-css@^5.3.1: version "5.3.5" resolved "https://registry.npmjs.org/nano-css/-/nano-css-5.3.5.tgz" @@ -10839,6 +11068,13 @@ node-fetch@2.6.7: dependencies: whatwg-url "^5.0.0" +node-fetch@^2.6.7: + version "2.7.0" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" + integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== + dependencies: + whatwg-url "^5.0.0" + node-forge@^1: version "1.3.1" resolved "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz" @@ -11141,11 +11377,36 @@ optionator@^0.9.1: type-check "^0.4.0" word-wrap "^1.2.3" +ora@^5.4.1: + version "5.4.1" + resolved "https://registry.yarnpkg.com/ora/-/ora-5.4.1.tgz#1b2678426af4ac4a509008e5e4ac9e9959db9e18" + integrity sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ== + dependencies: + bl "^4.1.0" + chalk "^4.1.0" + cli-cursor "^3.1.0" + cli-spinners "^2.5.0" + is-interactive "^1.0.0" + is-unicode-supported "^0.1.0" + log-symbols "^4.1.0" + strip-ansi "^6.0.0" + wcwidth "^1.0.1" + os-homedir@^1.0.1: version "1.0.2" resolved "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz" integrity sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ== +os-tmpdir@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" + integrity 
sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g== + +outvariant@^1.2.1, outvariant@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/outvariant/-/outvariant-1.4.0.tgz#e742e4bda77692da3eca698ef5bfac62d9fba06e" + integrity sha512-AlWY719RF02ujitly7Kk/0QlV+pXGFDHrHf9O2OKqyqgBieaPOIeuSkL8sRK6j2WK+/ZAURq2kZsY0d8JapUiw== + p-limit@^2.2.0: version "2.3.0" resolved "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz" @@ -11396,6 +11657,11 @@ path-to-regexp@^1.7.0: dependencies: isarray "0.0.1" +path-to-regexp@^6.2.0: + version "6.2.1" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-6.2.1.tgz#d54934d6798eb9e5ef14e7af7962c945906918e5" + integrity sha512-JLyh7xT1kizaEvcaXOQwOc2/Yhw6KZOvPf1S8401UyLk86CU79LN3vl7ztXGm/pZ+YjoyAJ4rxmHwbkBXJX+yw== + path-type@^4.0.0: version "4.0.0" resolved "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz" @@ -12699,7 +12965,7 @@ read-pkg@^5.2.0: parse-json "^5.0.0" type-fest "^0.6.0" -readable-stream@3, readable-stream@^3.0.0, readable-stream@^3.0.6: +readable-stream@3, readable-stream@^3.0.0, readable-stream@^3.0.6, readable-stream@^3.4.0: version "3.6.2" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== @@ -13161,6 +13427,11 @@ rtl-css-js@^1.14.0: dependencies: "@babel/runtime" "^7.1.2" +run-async@^2.4.0: + version "2.4.1" + resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.4.1.tgz#8440eccf99ea3e70bd409d49aab88e10c189a455" + integrity sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ== + run-parallel@^1.1.9: version "1.2.0" resolved "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz" @@ -13409,6 +13680,11 @@ set-blocking@^2.0.0: resolved "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz" integrity sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw== +set-cookie-parser@^2.4.6: + version "2.6.0" + resolved "https://registry.yarnpkg.com/set-cookie-parser/-/set-cookie-parser-2.6.0.tgz#131921e50f62ff1a66a461d7d62d7b21d5d15a51" + integrity sha512-RVnVQxTXuerk653XfuliOxBP81Sf0+qfQE73LIYKcyMYHG94AuH0kgrQpRDuTZnSmjpysHmzxJXKNfa6PjFhyQ== + set-harmonic-interval@^1.0.1: version "1.0.1" resolved "https://registry.npmjs.org/set-harmonic-interval/-/set-harmonic-interval-1.0.1.tgz" @@ -13761,6 +14037,18 @@ stream@^0.0.2: dependencies: emitter-component "^1.1.1" +strict-event-emitter@^0.2.4: + version "0.2.8" + resolved "https://registry.yarnpkg.com/strict-event-emitter/-/strict-event-emitter-0.2.8.tgz#b4e768927c67273c14c13d20e19d5e6c934b47ca" + integrity sha512-KDf/ujU8Zud3YaLtMCcTI4xkZlZVIYxTLr+XIULexP+77EEVWixeXroLUXQXiVtH4XH2W7jr/3PT1v3zBuvc3A== + dependencies: + events "^3.3.0" + +strict-event-emitter@^0.4.3: + version "0.4.6" + resolved "https://registry.yarnpkg.com/strict-event-emitter/-/strict-event-emitter-0.4.6.tgz#ff347c8162b3e931e3ff5f02cfce6772c3b07eb3" + integrity sha512-12KWeb+wixJohmnwNFerbyiBrAlq5qJLwIt38etRtKtmmHyDSoGlIqFE9wx+4IwG0aDjI7GV8tc8ZccjWZZtTg== + strict-uri-encode@^1.0.0: version "1.1.0" resolved "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz" @@ -14172,7 +14460,7 @@ through2@^4.0.0: dependencies: readable-stream "3" -"through@>=2.2.7 <3", through@^2.3.8: +"through@>=2.2.7 <3", through@^2.3.6, through@^2.3.8: version 
"2.3.8" resolved "https://registry.npmjs.org/through/-/through-2.3.8.tgz" integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== @@ -14207,6 +14495,13 @@ tinycolor2@1, tinycolor2@1.6.0, tinycolor2@^1.6.0: resolved "https://registry.yarnpkg.com/tinycolor2/-/tinycolor2-1.6.0.tgz#f98007460169b0263b97072c5ae92484ce02d09e" integrity sha512-XPaBkWQJdsf3pLKJV9p4qN/S+fm2Oj8AIPo1BTUhg5oxkvm9+SVEGFdhyOz7tTdUTfvxMiAs4sp6/eZO2Ew+pw== +tmp@^0.0.33: + version "0.0.33" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" + integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== + dependencies: + os-tmpdir "~1.0.2" + tmpl@1.0.5: version "1.0.5" resolved "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz" @@ -14408,6 +14703,11 @@ type-fest@^0.8.0, type-fest@^0.8.1: resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz" integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== +type-fest@^2.19.0: + version "2.19.0" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-2.19.0.tgz#88068015bb33036a598b952e55e9311a60fd3a9b" + integrity sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA== + type-is@~1.6.18: version "1.6.18" resolved "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz" @@ -14673,6 +14973,17 @@ util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== +util@^0.12.3: + version "0.12.5" + resolved "https://registry.yarnpkg.com/util/-/util-0.12.5.tgz#5f17a6059b73db61a875668781a1c2b136bd6fbc" + integrity sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA== + dependencies: + inherits "^2.0.3" + is-arguments "^1.0.4" + is-generator-function "^1.0.7" + is-typed-array "^1.1.3" + which-typed-array "^1.1.2" + utila@~0.4: version "0.4.0" resolved "https://registry.npmjs.org/utila/-/utila-0.4.0.tgz" @@ -14822,6 +15133,22 @@ wbuf@^1.1.0, wbuf@^1.7.3: dependencies: minimalistic-assert "^1.0.0" +wcwidth@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" + integrity sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg== + dependencies: + defaults "^1.0.3" + +web-encoding@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/web-encoding/-/web-encoding-1.1.5.tgz#fc810cf7667364a6335c939913f5051d3e0c4864" + integrity sha512-HYLeVCdJ0+lBYV2FvNZmv3HJ2Nt0QYXqZojk3d9FJOLkwnuhzM9tmamh8d7HPM8QqjKH8DeHkFTx+CFlWpZZDA== + dependencies: + util "^0.12.3" + optionalDependencies: + "@zxing/text-encoding" "0.9.0" + web-namespaces@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/web-namespaces/-/web-namespaces-2.0.1.tgz#1010ff7c650eccb2592cebeeaf9a1b253fd40692" @@ -15140,7 +15467,7 @@ which-module@^2.0.0: resolved "https://registry.npmjs.org/which-module/-/which-module-2.0.1.tgz" integrity sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ== -which-typed-array@^1.1.10: +which-typed-array@^1.1.10, which-typed-array@^1.1.11, which-typed-array@^1.1.2: version "1.1.11" resolved 
"https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.11.tgz#99d691f23c72aab6768680805a271b69761ed61a" integrity sha512-qe9UWWpkeG5yzZ0tNYxDmd7vo58HDBc39mZ0xWWpolAGADdFOzkfamWLDxkOWcvHQKVmdTyQdLD4NOfjLWTKew== @@ -15192,7 +15519,7 @@ word-wrapper@^1.0.7: resolved "https://registry.npmjs.org/word-wrapper/-/word-wrapper-1.0.7.tgz" integrity sha512-VOPBFCm9b6FyYKQYfn9AVn2dQvdR/YOVFV6IBRA1TBMJWKffvhEX1af6FMGrttILs2Q9ikCRhLqkbY2weW6dOQ== -wrap-ansi@^6.2.0: +wrap-ansi@^6.0.1, wrap-ansi@^6.2.0: version "6.2.0" resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz" integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA== @@ -15390,7 +15717,7 @@ yargs@^16.2.0: y18n "^5.0.5" yargs-parser "^20.2.2" -yargs@^17.0.0: +yargs@^17.0.0, yargs@^17.3.1: version "17.7.2" resolved "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz" integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== From 63b503a9fb20e373bcf1d52ae941f95a7c24b6fd Mon Sep 17 00:00:00 2001 From: Raj Kamal Singh <1133322+rkssisodiya@users.noreply.github.com> Date: Thu, 19 Oct 2023 20:01:58 +0530 Subject: [PATCH 18/23] Feat/opamp managed otel collector all deployments (#3774) * chore: get working and add config for opamp managed collector * chore: add config for opamp managed collector to pkg/query-service/tests/test-deploy/ * chore: add config for opamp managed collector to docker-swarm deployment * chore: update signoz-otel-collector image version & specify --copy-path * chore: retain root user comment in docker-compose-core.yaml * chore: update collector image tag in swarm conf and add qs dependencies * chore: some more cleanup --- .../clickhouse-setup/docker-compose.yaml | 10 ++++--- .../otel-collector-opamp-config.yaml | 1 + .../clickhouse-setup/docker-compose-core.yaml | 27 ++++++++++++++++++- .../docker-compose-local.yaml | 4 +-- .../clickhouse-setup/docker-compose.yaml | 9 +++++-- .../tests/test-deploy/docker-compose.yaml | 12 +++++++-- .../otel-collector-opamp-config.yaml | 1 + 7 files changed, 54 insertions(+), 10 deletions(-) create mode 100644 deploy/docker-swarm/clickhouse-setup/otel-collector-opamp-config.yaml create mode 100644 pkg/query-service/tests/test-deploy/otel-collector-opamp-config.yaml diff --git a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml index 378d47aebd..2516d96728 100644 --- a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml +++ b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml @@ -197,15 +197,17 @@ services: - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf otel-collector: - image: signoz/signoz-otel-collector:0.79.8 + image: signoz/signoz-otel-collector:0.79.9 command: [ "--config=/etc/otel-collector-config.yaml", + "--manager-config=/etc/manager-config.yaml", "--feature-gates=-pkg.translator.prometheus.NormalizeName" ] user: root # required for reading docker container logs volumes: - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml + - ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml - /var/lib/docker/containers:/var/lib/docker/containers:ro environment: - OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}},dockerswarm.service.name={{.Service.Name}},dockerswarm.task.name={{.Task.Name}} @@ -227,10 +229,12 @@ services: mode: global restart_policy: condition: on-failure - <<: *clickhouse-depend + depends_on: + - clickhouse + - 
       query-service

   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:0.79.8
+    image: signoz/signoz-otel-collector:0.79.9
     command:
       [
         "--config=/etc/otel-collector-metrics-config.yaml",
diff --git a/deploy/docker-swarm/clickhouse-setup/otel-collector-opamp-config.yaml b/deploy/docker-swarm/clickhouse-setup/otel-collector-opamp-config.yaml
new file mode 100644
index 0000000000..e408b55ef6
--- /dev/null
+++ b/deploy/docker-swarm/clickhouse-setup/otel-collector-opamp-config.yaml
@@ -0,0 +1 @@
+server_endpoint: ws://query-service:4320/v1/opamp
diff --git a/deploy/docker/clickhouse-setup/docker-compose-core.yaml b/deploy/docker/clickhouse-setup/docker-compose-core.yaml
index 4912e3a1b5..6c4edc2827 100644
--- a/deploy/docker/clickhouse-setup/docker-compose-core.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose-core.yaml
@@ -1,6 +1,23 @@
 version: "2.4"

 services:
+  zookeeper-1:
+    image: bitnami/zookeeper:3.7.1
+    container_name: signoz-zookeeper-1
+    hostname: zookeeper-1
+    user: root
+    ports:
+      - "2181:2181"
+      - "2888:2888"
+      - "3888:3888"
+    volumes:
+      - ./data/zookeeper-1:/bitnami/zookeeper
+    environment:
+      - ZOO_SERVER_ID=1
+      # - ZOO_SERVERS=0.0.0.0:2888:3888,zookeeper-2:2888:3888,zookeeper-3:2888:3888
+      - ALLOW_ANONYMOUS_LOGIN=yes
+      - ZOO_AUTOPURGE_INTERVAL=1
+
   clickhouse:
     image: clickhouse/clickhouse-server:23.7.3-alpine
     container_name: signoz-clickhouse
@@ -11,8 +28,11 @@ services:
     volumes:
       - ./clickhouse-config.xml:/etc/clickhouse-server/config.xml
       - ./clickhouse-users.xml:/etc/clickhouse-server/users.xml
+      - ./custom-function.xml:/etc/clickhouse-server/custom-function.xml
+      - ./clickhouse-cluster.xml:/etc/clickhouse-server/config.d/cluster.xml
       # - ./clickhouse-storage.xml:/etc/clickhouse-server/config.d/storage.xml
       - ./data/clickhouse/:/var/lib/clickhouse/
+      - ./user_scripts:/var/lib/clickhouse/user_scripts/
     restart: on-failure
     logging:
       options:
@@ -48,15 +68,18 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`

   otel-collector:
     container_name: signoz-otel-collector
-    image: signoz/signoz-otel-collector:0.79.8
+    image: signoz/signoz-otel-collector:0.79.9
     command:
       [
         "--config=/etc/otel-collector-config.yaml",
+        "--manager-config=/etc/manager-config.yaml",
+        "--copy-path=/var/tmp/collector-config.yaml",
         "--feature-gates=-pkg.translator.prometheus.NormalizeName"
       ]
     # user: root # required for reading docker container logs
     volumes:
       - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
+      - ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml
     environment:
       - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
     ports:
@@ -75,6 +98,8 @@ services:
     depends_on:
       clickhouse:
         condition: service_healthy
+      query-service:
+        condition: service_healthy

   otel-collector-metrics:
     container_name: signoz-otel-collector-metrics
diff --git a/deploy/docker/clickhouse-setup/docker-compose-local.yaml b/deploy/docker/clickhouse-setup/docker-compose-local.yaml
index 78aa72ff75..a92c3dbcd9 100644
--- a/deploy/docker/clickhouse-setup/docker-compose-local.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose-local.yaml
@@ -4,8 +4,8 @@ services:
   query-service:
     hostname: query-service
     build:
-      context: "../../../pkg/query-service"
-      dockerfile: "./Dockerfile"
+      context: "../../../"
+      dockerfile: "./pkg/query-service/Dockerfile"
     args:
       LDFLAGS: ""
       TARGETPLATFORM: "${GOOS}/${GOARCH}"
diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml
index 163179ffb9..83e3c6812d 100644
--- a/deploy/docker/clickhouse-setup/docker-compose.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose.yaml
@@ -213,12 +213,13 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

   otel-collector:
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.8}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.9}
     container_name: signoz-otel-collector
     command:
       [
         "--config=/etc/otel-collector-config.yaml",
         "--manager-config=/etc/manager-config.yaml",
+        "--copy-path=/var/tmp/collector-config.yaml",
         "--feature-gates=-pkg.translator.prometheus.NormalizeName"
       ]
     user: root # required for reading docker container logs
@@ -243,7 +244,11 @@ services:
       # - "55678:55678" # OpenCensus receiver
       # - "55679:55679" # zPages extension
     restart: on-failure
-    <<: *clickhouse-depend
+    depends_on:
+      clickhouse:
+        condition: service_healthy
+      query-service:
+        condition: service_healthy

   otel-collector-metrics:
     image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.8}
diff --git a/pkg/query-service/tests/test-deploy/docker-compose.yaml b/pkg/query-service/tests/test-deploy/docker-compose.yaml
index 6d7bc5160b..df879f4099 100644
--- a/pkg/query-service/tests/test-deploy/docker-compose.yaml
+++ b/pkg/query-service/tests/test-deploy/docker-compose.yaml
@@ -190,16 +190,19 @@ services:
     <<: *clickhouse-depends

   otel-collector:
-    image: signoz/signoz-otel-collector:0.79.8
+    image: signoz/signoz-otel-collector:0.79.9
     container_name: signoz-otel-collector
     command:
       [
         "--config=/etc/otel-collector-config.yaml",
+        "--manager-config=/etc/manager-config.yaml",
+        "--copy-path=/var/tmp/collector-config.yaml",
         "--feature-gates=-pkg.translator.prometheus.NormalizeName"
       ]
     user: root # required for reading docker container logs
     volumes:
       - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
+      - ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml
       - /var/lib/docker/containers:/var/lib/docker/containers:ro
     environment:
       - OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux
@@ -218,7 +221,12 @@ services:
       # - "55678:55678" # OpenCensus receiver
       # - "55679:55679" # zPages extension
     restart: on-failure
-    <<: *clickhouse-depends
+    depends_on:
+      clickhouse:
+        condition: service_healthy
+      query-service:
+        condition: service_healthy
+

   otel-collector-metrics:
     image: signoz/signoz-otel-collector:0.79.8
diff --git a/pkg/query-service/tests/test-deploy/otel-collector-opamp-config.yaml b/pkg/query-service/tests/test-deploy/otel-collector-opamp-config.yaml
new file mode 100644
index 0000000000..e408b55ef6
--- /dev/null
+++ b/pkg/query-service/tests/test-deploy/otel-collector-opamp-config.yaml
@@ -0,0 +1 @@
+server_endpoint: ws://query-service:4320/v1/opamp
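Taken together, the collector-side wiring added in this patch amounts to the following minimal sketch (all values copied from the compose changes above; image tag and file paths are deployment-specific):

    # otel-collector-opamp-config.yaml
    server_endpoint: ws://query-service:4320/v1/opamp

    # docker-compose fragment
    otel-collector:
      image: signoz/signoz-otel-collector:0.79.9
      command:
        - "--config=/etc/otel-collector-config.yaml"
        - "--manager-config=/etc/manager-config.yaml"
        - "--copy-path=/var/tmp/collector-config.yaml"
      volumes:
        - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
        - ./otel-collector-opamp-config.yaml:/etc/manager-config.yaml
      depends_on:
        query-service:
          condition: service_healthy

The collector reads the OpAMP manager config from /etc/manager-config.yaml and connects to the query-service websocket endpoint, which is why the compose files now make the collector wait for query-service to become healthy.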
From 3f912edc98434353194bf637131a7c1878dd771a Mon Sep 17 00:00:00 2001
From: Prashant Shahi
Date: Thu, 19 Oct 2023 22:19:55 +0545
Subject: [PATCH 19/23] =?UTF-8?q?chore(release):=20=F0=9F=93=8C=20pin=20ve?=
 =?UTF-8?q?rsions:=20SigNoz=200.32.0,=20SigNoz=20OtelCollector=200.79.10?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: Prashant Shahi
---
 deploy/docker-swarm/clickhouse-setup/docker-compose.yaml | 8 ++++----
 deploy/docker/clickhouse-setup/docker-compose-core.yaml  | 4 ++--
 deploy/docker/clickhouse-setup/docker-compose.yaml       | 8 ++++----
 pkg/query-service/tests/test-deploy/docker-compose.yaml  | 4 ++--
 4 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
index 2516d96728..5c62ceafdc 100644
--- a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
+++ b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
@@ -144,7 +144,7 @@ services:
         condition: on-failure

   query-service:
-    image: signoz/query-service:0.31.1
+    image: signoz/query-service:0.32.0
     command:
       [
         "-config=/root/config/prometheus.yml",
@@ -184,7 +184,7 @@ services:
     <<: *clickhouse-depend

   frontend:
-    image: signoz/frontend:0.31.1
+    image: signoz/frontend:0.32.0
     deploy:
       restart_policy:
         condition: on-failure
@@ -197,7 +197,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

   otel-collector:
-    image: signoz/signoz-otel-collector:0.79.9
+    image: signoz/signoz-otel-collector:0.79.10
     command:
       [
         "--config=/etc/otel-collector-config.yaml",
@@ -234,7 +234,7 @@ services:
       - query-service

   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:0.79.9
+    image: signoz/signoz-otel-collector:0.79.10
     command:
       [
         "--config=/etc/otel-collector-metrics-config.yaml",
diff --git a/deploy/docker/clickhouse-setup/docker-compose-core.yaml b/deploy/docker/clickhouse-setup/docker-compose-core.yaml
index 6c4edc2827..8ef264d73e 100644
--- a/deploy/docker/clickhouse-setup/docker-compose-core.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose-core.yaml
@@ -68,7 +68,7 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
   otel-collector:
     container_name: signoz-otel-collector
-    image: signoz/signoz-otel-collector:0.79.9
+    image: signoz/signoz-otel-collector:0.79.10
     command:
       [
         "--config=/etc/otel-collector-config.yaml",
@@ -103,7 +103,7 @@ services:

   otel-collector-metrics:
     container_name: signoz-otel-collector-metrics
-    image: signoz/signoz-otel-collector:0.79.8
+    image: signoz/signoz-otel-collector:0.79.10
     command:
       [
         "--config=/etc/otel-collector-metrics-config.yaml",
diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml
index 83e3c6812d..6eeee79b1a 100644
--- a/deploy/docker/clickhouse-setup/docker-compose.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose.yaml
@@ -162,7 +162,7 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
   query-service:
-    image: signoz/query-service:${DOCKER_TAG:-0.31.1}
+    image: signoz/query-service:${DOCKER_TAG:-0.32.0}
     container_name: signoz-query-service
     command:
       [
@@ -201,7 +201,7 @@ services:
     <<: *clickhouse-depend

   frontend:
-    image: signoz/frontend:${DOCKER_TAG:-0.31.1}
+    image: signoz/frontend:${DOCKER_TAG:-0.32.0}
     container_name: signoz-frontend
     restart: on-failure
     depends_on:
@@ -213,7 +213,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

   otel-collector:
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.9}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.10}
     container_name: signoz-otel-collector
     command:
       [
@@ -251,7 +251,7 @@ services:
         condition: service_healthy

   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.8}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.10}
     container_name: signoz-otel-collector-metrics
     command:
       [
diff --git a/pkg/query-service/tests/test-deploy/docker-compose.yaml b/pkg/query-service/tests/test-deploy/docker-compose.yaml
index df879f4099..03a602035b 100644
--- a/pkg/query-service/tests/test-deploy/docker-compose.yaml
+++ b/pkg/query-service/tests/test-deploy/docker-compose.yaml
@@ -190,7 +190,7 @@ services:
     <<: *clickhouse-depends

   otel-collector:
-    image: signoz/signoz-otel-collector:0.79.9
+    image: signoz/signoz-otel-collector:0.79.10
     container_name: signoz-otel-collector
     command:
       [
@@ -229,7 +229,7 @@ services:

   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:0.79.8
+    image: signoz/signoz-otel-collector:0.79.10
     container_name: signoz-otel-collector-metrics
     command:
       [

From ab42700245c7c07af6cb8e0d4167b5a2677a37ea Mon Sep 17 00:00:00 2001
From: Dhawal Sanghvi <43755122+dhawal1248@users.noreply.github.com>
Date: Fri, 20 Oct 2023 12:37:45 +0530
Subject: [PATCH 20/23] query-service: add cluster name cli flag (#3713)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* chore: query-service add cluster name cli flag
* chore: add schema migrator to docker compose file
* chore: add schema migrator to docker swarm compose file
* chore: 📌 pin versions: Schema Migrator 0.79.10 and update compose files
* chore: 🔧 update compose depends_on for schema-migrator service

---------

Co-authored-by: Prashant Shahi
---
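For illustration (not part of the diff below): the new flag can be passed straight through the compose files. A minimal query-service fragment, assuming the flag name and default taken from the main.go change in this patch; the override is only needed when the ClickHouse cluster is named something other than the default "cluster":

    query-service:
      image: signoz/query-service:0.32.0
      command:
        [
          "-config=/root/config/prometheus.yml",
          # optional: must match the cluster name in clickhouse-cluster.xml
          "-cluster=cluster"
        ]

The value ends up in ServerOptions.Cluster and is used for every "ALTER TABLE ... ON CLUSTER %s" statement issued by the reader.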
 .../clickhouse-setup/docker-compose.yaml      | 30 ++++++++++++++-----
 .../clickhouse-setup/docker-compose-core.yaml | 21 ++++++++++++---
 .../clickhouse-setup/docker-compose.yaml      | 28 +++++++++++++----
 ee/query-service/app/db/reader.go             |  3 +-
 ee/query-service/app/server.go                |  2 ++
 ee/query-service/main.go                      |  3 ++
 .../app/clickhouseReader/reader.go            | 28 +++++++++--------
 pkg/query-service/app/server.go               |  2 ++
 pkg/query-service/main.go                     |  3 ++
 .../tests/test-deploy/docker-compose.yaml     | 28 +++++++++++++----
 10 files changed, 114 insertions(+), 34 deletions(-)

diff --git a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
index 2516d96728..24fc87f216 100644
--- a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
+++ b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
@@ -33,12 +33,14 @@ x-clickhouse-defaults: &clickhouse-defaults
       soft: 262144
       hard: 262144

-x-clickhouse-depend: &clickhouse-depend
+x-db-depend: &db-depend
   depends_on:
     - clickhouse
+    - otel-collector-migrator
     # - clickhouse-2
     # - clickhouse-3
+
 services:
   zookeeper-1:
     image: bitnami/zookeeper:3.7.1
@@ -144,7 +146,7 @@ services:
         condition: on-failure

   query-service:
-    image: signoz/query-service:0.31.1
+    image: signoz/query-service:0.32.0
     command:
       [
         "-config=/root/config/prometheus.yml",
@@ -181,10 +183,10 @@ services:
     deploy:
       restart_policy:
         condition: on-failure
-    <<: *clickhouse-depend
+    <<: *db-depend

   frontend:
-    image: signoz/frontend:0.31.1
+    image: signoz/frontend:0.32.0
     deploy:
       restart_policy:
         condition: on-failure
@@ -197,7 +199,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

   otel-collector:
-    image: signoz/signoz-otel-collector:0.79.9
+    image: signoz/signoz-otel-collector:0.79.10
     command:
       [
         "--config=/etc/otel-collector-config.yaml",
@@ -231,10 +233,24 @@ services:
         condition: on-failure
     depends_on:
       - clickhouse
+      - otel-collector-migrator
       - query-service

+  otel-collector-migrator:
+    image: signoz/signoz-schema-migrator:0.79.10
+    deploy:
+      restart_policy:
+        condition: on-failure
+        delay: 5s
+    command:
+      - "--dsn=tcp://clickhouse:9000"
+    depends_on:
+      - clickhouse
+      # - clickhouse-2
+      # - clickhouse-3
+
   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:0.79.9
+    image: signoz/signoz-otel-collector:0.79.10
     command:
       [
         "--config=/etc/otel-collector-metrics-config.yaml",
@@ -250,7 +266,7 @@ services:
     deploy:
       restart_policy:
         condition: on-failure
-    <<: *clickhouse-depend
+    <<: *db-depend

   logspout:
     image: "gliderlabs/logspout:v3.2.14"
diff --git a/deploy/docker/clickhouse-setup/docker-compose-core.yaml b/deploy/docker/clickhouse-setup/docker-compose-core.yaml
index 6c4edc2827..38f6e32b24 100644
--- a/deploy/docker/clickhouse-setup/docker-compose-core.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose-core.yaml
@@ -65,10 +65,23 @@ services:
       - --queryService.url=http://query-service:8085
       - --storage.path=/data

+  otel-collector-migrator:
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.79.10}
+    container_name: otel-migrator
+    command:
+      - "--dsn=tcp://clickhouse:9000"
+    depends_on:
+      clickhouse:
+        condition: service_healthy
+      # clickhouse-2:
+      #   condition: service_healthy
+      # clickhouse-3:
+      #   condition: service_healthy
+
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
   otel-collector:
     container_name: signoz-otel-collector
-    image: signoz/signoz-otel-collector:0.79.9
+    image: signoz/signoz-otel-collector:0.79.10
     command:
       [
         "--config=/etc/otel-collector-config.yaml",
@@ -98,12 +111,14 @@ services:
     depends_on:
       clickhouse:
         condition: service_healthy
+      otel-collector-migrator:
+        condition: service_completed_successfully
       query-service:
         condition: service_healthy

   otel-collector-metrics:
     container_name: signoz-otel-collector-metrics
-    image: signoz/signoz-otel-collector:0.79.8
+    image: signoz/signoz-otel-collector:0.79.10
     command:
       [
         "--config=/etc/otel-collector-metrics-config.yaml",
@@ -120,6 +135,8 @@ services:
     depends_on:
       clickhouse:
         condition: service_healthy
+      otel-collector-migrator:
+        condition: service_completed_successfully

   logspout:
     image: "gliderlabs/logspout:v3.2.14"
diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml
index 83e3c6812d..b110da41d8 100644
--- a/deploy/docker/clickhouse-setup/docker-compose.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose.yaml
@@ -32,10 +32,12 @@ x-clickhouse-defaults: &clickhouse-defaults
       soft: 262144
       hard: 262144

-x-clickhouse-depend: &clickhouse-depend
+x-db-depend: &db-depend
   depends_on:
     clickhouse:
       condition: service_healthy
+    otel-collector-migrator:
+      condition: service_completed_successfully
     # clickhouse-2:
     #   condition: service_healthy
     # clickhouse-3:
@@ -198,7 +200,7 @@ services:
       interval: 30s
       timeout: 5s
       retries: 3
-    <<: *clickhouse-depend
+    <<: *db-depend

   frontend:
     image: signoz/frontend:${DOCKER_TAG:-0.31.1}
@@ -212,8 +214,22 @@ services:
     volumes:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

+  otel-collector-migrator:
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.79.10}
+    container_name: otel-migrator
+    command:
+      - "--dsn=tcp://clickhouse:9000"
+    depends_on:
+      clickhouse:
+        condition: service_healthy
+      # clickhouse-2:
+      #   condition: service_healthy
+      # clickhouse-3:
+      #   condition: service_healthy
+
+
   otel-collector:
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.9}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.10}
     container_name: signoz-otel-collector
     command:
       [
@@ -247,11 +263,13 @@ services:
     depends_on:
       clickhouse:
         condition: service_healthy
+      otel-collector-migrator:
+        condition: service_completed_successfully
       query-service:
         condition: service_healthy

   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.8}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.10}
     container_name: signoz-otel-collector-metrics
     command:
       [
@@ -266,7 +284,7 @@ services:
       # - "13133:13133" # Health check extension
       # - "55679:55679" # zPages extension
     restart: on-failure
-    <<: *clickhouse-depend
+    <<: *db-depend

   logspout:
     image: "gliderlabs/logspout:v3.2.14"
diff --git a/ee/query-service/app/db/reader.go b/ee/query-service/app/db/reader.go
index c0236548b1..b8326058ec 100644
--- a/ee/query-service/app/db/reader.go
+++ b/ee/query-service/app/db/reader.go
@@ -24,8 +24,9 @@ func NewDataConnector(
 	maxIdleConns int,
 	maxOpenConns int,
 	dialTimeout time.Duration,
+	cluster string,
 ) *ClickhouseReader {
-	ch := basechr.NewReader(localDB, promConfigPath, lm, maxIdleConns, maxOpenConns, dialTimeout)
+	ch := basechr.NewReader(localDB, promConfigPath, lm, maxIdleConns, maxOpenConns, dialTimeout, cluster)
 	return &ClickhouseReader{
 		conn:  ch.GetConn(),
 		appdb: localDB,
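The depends_on changes above encode the intended startup order: ClickHouse becomes healthy, the schema migrator runs to completion, and only then do the collectors start. A minimal sketch of that ordering, using the service names and tags pinned in this patch:

    otel-collector-migrator:
      image: signoz/signoz-schema-migrator:0.79.10
      command:
        - "--dsn=tcp://clickhouse:9000"
      depends_on:
        clickhouse:
          condition: service_healthy

    otel-collector:
      depends_on:
        otel-collector-migrator:
          condition: service_completed_successfully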
diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go
index 4d457776a1..3d50ec5ede 100644
--- a/ee/query-service/app/server.go
+++ b/ee/query-service/app/server.go
@@ -67,6 +67,7 @@ type ServerOptions struct {
 	DialTimeout       time.Duration
 	CacheConfigPath   string
 	FluxInterval      string
+	Cluster           string
 }

 // Server runs HTTP api service
@@ -139,6 +140,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 			serverOptions.MaxIdleConns,
 			serverOptions.MaxOpenConns,
 			serverOptions.DialTimeout,
+			serverOptions.Cluster,
 		)
 		go qb.Start(readerReady)
 		reader = qb
diff --git a/ee/query-service/main.go b/ee/query-service/main.go
index d9b90340ae..427f78059b 100644
--- a/ee/query-service/main.go
+++ b/ee/query-service/main.go
@@ -81,6 +81,7 @@ func main() {
 	// the url used to build link in the alert messages in slack and other systems
 	var ruleRepoURL string
+	var cluster string

 	var cacheConfigPath, fluxInterval string
 	var enableQueryServiceLogOTLPExport bool
@@ -103,6 +104,7 @@ func main() {
 	flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)")
 	flag.StringVar(&fluxInterval, "flux-interval", "5m", "(cache config to use)")
 	flag.BoolVar(&enableQueryServiceLogOTLPExport, "enable.query.service.log.otlp.export", false, "(enable query service log otlp export)")
+	flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')")

 	flag.Parse()

@@ -128,6 +130,7 @@ func main() {
 		DialTimeout:       dialTimeout,
 		CacheConfigPath:   cacheConfigPath,
 		FluxInterval:      fluxInterval,
+		Cluster:           cluster,
 	}

 	// Read the jwt secret key
diff --git a/pkg/query-service/app/clickhouseReader/reader.go b/pkg/query-service/app/clickhouseReader/reader.go
index 1a6f768994..2c24b4b8c1 100644
--- a/pkg/query-service/app/clickhouseReader/reader.go
+++ b/pkg/query-service/app/clickhouseReader/reader.go
@@ -55,7 +55,6 @@ import (
 )

 const (
-	cluster                    = "cluster"
 	primaryNamespace           = "clickhouse"
 	archiveNamespace           = "clickhouse-archive"
 	signozTraceDBName          = "signoz_traces"
@@ -116,6 +115,7 @@ type ClickHouseReader struct {
 	featureFlags           interfaces.FeatureLookup
 	liveTailRefreshSeconds int
+	cluster                string
 }

 // NewTraceReader returns a TraceReader for the database
@@ -126,6 +126,7 @@ func NewReader(
 	maxIdleConns int,
 	maxOpenConns int,
 	dialTimeout time.Duration,
+	cluster string,
 ) *ClickHouseReader {

 	datasource := os.Getenv("ClickHouseUrl")
@@ -168,6 +169,7 @@ func NewReader(
 		liveTailRefreshSeconds: options.primary.LiveTailRefreshSeconds,
 		promConfigFile:         configFile,
 		featureFlags:           featureFlag,
+		cluster:                cluster,
 	}
 }

@@ -2287,7 +2289,7 @@ func (r *ClickHouseReader) SetTTL(ctx context.Context,
 		}
 		req := fmt.Sprintf(
 			"ALTER TABLE %v ON CLUSTER %s MODIFY TTL toDateTime(timestamp) + INTERVAL %v SECOND DELETE",
-			tableName, cluster, params.DelDuration)
+			tableName, r.cluster, params.DelDuration)
 		if len(params.ColdStorageVolume) > 0 {
 			req += fmt.Sprintf(", toDateTime(timestamp) + INTERVAL %v SECOND TO VOLUME '%s'",
 				params.ToColdStorageDuration, params.ColdStorageVolume)
@@ -2342,7 +2344,7 @@ func (r *ClickHouseReader) SetTTL(ctx context.Context,
 		}
 		req := fmt.Sprintf(
 			"ALTER TABLE %v ON CLUSTER %s MODIFY TTL toDateTime(toUInt32(timestamp_ms / 1000), 'UTC') + "+
-				"INTERVAL %v SECOND DELETE", tableName, cluster, params.DelDuration)
+				"INTERVAL %v SECOND DELETE", tableName, r.cluster, params.DelDuration)
 		if len(params.ColdStorageVolume) > 0 {
 			req += fmt.Sprintf(", toDateTime(toUInt32(timestamp_ms / 1000), 'UTC')"+
 				" + INTERVAL %v SECOND TO VOLUME '%s'",
@@ -2396,7 +2398,7 @@ func (r *ClickHouseReader) SetTTL(ctx context.Context,
 		}
 		req := fmt.Sprintf(
 			"ALTER TABLE %v ON CLUSTER %s MODIFY TTL toDateTime(timestamp / 1000000000) + "+
-				"INTERVAL %v SECOND DELETE", tableName, cluster, params.DelDuration)
+				"INTERVAL %v SECOND DELETE", tableName, r.cluster, params.DelDuration)
 		if len(params.ColdStorageVolume) > 0 {
 			req += fmt.Sprintf(", toDateTime(timestamp / 1000000000)"+
 				" + INTERVAL %v SECOND TO VOLUME '%s'",
@@ -2502,7 +2504,7 @@ func (r *ClickHouseReader) setColdStorage(ctx context.Context, tableName string,
 	// Set the storage policy for the required table. If it is already set, then setting it again
 	// will not a problem.
 	if len(coldStorageVolume) > 0 {
-		policyReq := fmt.Sprintf("ALTER TABLE %s ON CLUSTER %s MODIFY SETTING storage_policy='tiered'", tableName, cluster)
+		policyReq := fmt.Sprintf("ALTER TABLE %s ON CLUSTER %s MODIFY SETTING storage_policy='tiered'", tableName, r.cluster)

 		zap.S().Debugf("Executing Storage policy request: %s\n", policyReq)
 		if err := r.db.Exec(ctx, policyReq); err != nil {
@@ -3480,7 +3482,7 @@ func (r *ClickHouseReader) UpdateLogField(ctx context.Context, field *model.Upda
 		// create materialized column
 		query := fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s ADD COLUMN IF NOT EXISTS %s %s MATERIALIZED %s[indexOf(%s, '%s')] CODEC(ZSTD(1))",
 			r.logsDB, r.logsLocalTable,
-			cluster,
+			r.cluster,
 			colname, field.DataType,
 			valueColName,
 			keyColName,
@@ -3493,7 +3495,7 @@ func (r *ClickHouseReader) UpdateLogField(ctx context.Context, field *model.Upda
 		query = fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s ADD COLUMN IF NOT EXISTS %s %s MATERIALIZED -1",
 			r.logsDB, r.logsTable,
-			cluster,
+			r.cluster,
 			colname, field.DataType,
 		)
 		err = r.db.Exec(ctx, query)
@@ -3504,7 +3506,7 @@ func (r *ClickHouseReader) UpdateLogField(ctx context.Context, field *model.Upda
 		// create exists column
 		query = fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s ADD COLUMN IF NOT EXISTS %s_exists bool MATERIALIZED if(indexOf(%s, '%s') != 0, true, false) CODEC(ZSTD(1))",
 			r.logsDB, r.logsLocalTable,
-			cluster,
+			r.cluster,
 			colname,
 			keyColName,
 			field.Name,
@@ -3516,7 +3518,7 @@ func (r *ClickHouseReader) UpdateLogField(ctx context.Context, field *model.Upda
 		query = fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s ADD COLUMN IF NOT EXISTS %s_exists bool MATERIALIZED false",
 			r.logsDB, r.logsTable,
-			cluster,
+			r.cluster,
 			colname,
 		)
 		err = r.db.Exec(ctx, query)
@@ -3533,7 +3535,7 @@ func (r *ClickHouseReader) UpdateLogField(ctx context.Context, field *model.Upda
 			}
 			query = fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s ADD INDEX IF NOT EXISTS %s_idx (%s) TYPE %s  GRANULARITY %d",
 				r.logsDB, r.logsLocalTable,
-				cluster,
+				r.cluster,
 				colname,
 				colname,
 				field.IndexType,
@@ -3546,7 +3548,7 @@ func (r *ClickHouseReader) UpdateLogField(ctx context.Context, field *model.Upda

 	} else {
 		// Delete the index first
-		query := fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s DROP INDEX IF EXISTS %s_idx", r.logsDB, r.logsLocalTable, cluster, colname)
+		query := fmt.Sprintf("ALTER TABLE %s.%s ON CLUSTER %s DROP INDEX IF EXISTS %s_idx", r.logsDB, r.logsLocalTable, r.cluster, colname)
 		err := r.db.Exec(ctx, query)
 		if err != nil {
 			return &model.ApiError{Err: err, Typ: model.ErrorInternal}
@@ -3557,7 +3559,7 @@ func (r *ClickHouseReader) UpdateLogField(ctx context.Context, field *model.Upda
 			query := "ALTER TABLE %s.%s ON CLUSTER %s DROP COLUMN IF EXISTS %s "
 			err := r.db.Exec(ctx, fmt.Sprintf(query,
 				r.logsDB, table,
-				cluster,
+				r.cluster,
 				colname,
 			),
 			)
@@ -3569,7 +3571,7 @@ func (r *ClickHouseReader) UpdateLogField(ctx context.Context, field *model.Upda
 			query = "ALTER TABLE %s.%s ON CLUSTER %s DROP COLUMN IF EXISTS %s_exists "
 			err = r.db.Exec(ctx, fmt.Sprintf(query,
 				r.logsDB, table,
-				cluster,
+				r.cluster,
 				colname,
 			),
 			)
diff --git a/pkg/query-service/app/server.go b/pkg/query-service/app/server.go
index b71b9bde3e..f7e6e43d2c 100644
--- a/pkg/query-service/app/server.go
+++ b/pkg/query-service/app/server.go
@@ -58,6 +58,7 @@ type ServerOptions struct {
 	DialTimeout     time.Duration
 	CacheConfigPath string
 	FluxInterval    string
+	Cluster         string
 }

 // Server runs HTTP, Mux and a grpc server
@@ -119,6 +120,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
 			serverOptions.MaxIdleConns,
 			serverOptions.MaxOpenConns,
 			serverOptions.DialTimeout,
+			serverOptions.Cluster,
 		)
 		go clickhouseReader.Start(readerReady)
 		reader = clickhouseReader
diff --git a/pkg/query-service/main.go b/pkg/query-service/main.go
index 76382b10c0..bb0f9e1aca 100644
--- a/pkg/query-service/main.go
+++ b/pkg/query-service/main.go
@@ -34,6 +34,7 @@ func main() {
 	// the url used to build link in the alert messages in slack and other systems
 	var ruleRepoURL, cacheConfigPath, fluxInterval string
+	var cluster string

 	var preferDelta bool
 	var preferSpanMetrics bool
@@ -53,6 +54,7 @@ func main() {
 	flag.StringVar(&ruleRepoURL, "rules.repo-url", constants.AlertHelpPage, "(host address used to build rule link in alert messages)")
 	flag.StringVar(&cacheConfigPath, "experimental.cache-config", "", "(cache config to use)")
 	flag.StringVar(&fluxInterval, "flux-interval", "5m", "(cache config to use)")
+	flag.StringVar(&cluster, "cluster", "cluster", "(cluster name - defaults to 'cluster')")

 	flag.Parse()

 	loggerMgr := initZapLog()
@@ -76,6 +78,7 @@ func main() {
 		DialTimeout:     dialTimeout,
 		CacheConfigPath: cacheConfigPath,
 		FluxInterval:    fluxInterval,
+		Cluster:         cluster,
 	}

 	// Read the jwt secret key
diff --git a/pkg/query-service/tests/test-deploy/docker-compose.yaml b/pkg/query-service/tests/test-deploy/docker-compose.yaml
index df879f4099..f44833ef47 100644
--- a/pkg/query-service/tests/test-deploy/docker-compose.yaml
+++ b/pkg/query-service/tests/test-deploy/docker-compose.yaml
@@ -31,10 +31,12 @@ x-clickhouse-defaults: &clickhouse-defaults
       soft: 262144
       hard: 262144

-x-clickhouse-depends: &clickhouse-depends
+x-db-depend: &db-depend
   depends_on:
     clickhouse:
       condition: service_healthy
+    otel-collector-migrator:
+      condition: service_completed_successfully
     # clickhouse-2:
     #   condition: service_healthy
     # clickhouse-3:
@@ -187,10 +189,23 @@ services:
       interval: 30s
       timeout: 5s
       retries: 3
-    <<: *clickhouse-depends
+    <<: *db-depend
+
+  otel-collector-migrator:
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.79.10}
+    container_name: otel-migrator
+    command:
+      - "--dsn=tcp://clickhouse:9000"
+    depends_on:
+      clickhouse:
+        condition: service_healthy
+      # clickhouse-2:
+      #   condition: service_healthy
+      # clickhouse-3:
+      #   condition: service_healthy

   otel-collector:
-    image: signoz/signoz-otel-collector:0.79.9
+    image: signoz/signoz-otel-collector:0.79.10
     container_name: signoz-otel-collector
     command:
       [
@@ -224,12 +239,13 @@ services:
     depends_on:
       clickhouse:
         condition: service_healthy
+      otel-collector-migrator:
+        condition: service_completed_successfully
       query-service:
         condition: service_healthy
-
   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:0.79.8
+    image: signoz/signoz-otel-collector:0.79.10
     container_name: signoz-otel-collector-metrics
     command:
       [
@@ -244,7 +260,7 @@ services:
       # - "13133:13133" # Health check extension
       # - "55679:55679" # zPages extension
     restart: on-failure
-    <<: *clickhouse-depends
+    <<: *db-depend

   logspout:
     image: "gliderlabs/logspout:v3.2.14"

From ba05991222294fd0d01bb021068f1d309c35dbb7 Mon Sep 17 00:00:00 2001
From: Vishal Sharma
Date: Fri, 20 Oct 2023 13:59:44 +0530
Subject: [PATCH 21/23] chore: update logs/traces retention to 15 days (#3780)

---
 deploy/install.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/deploy/install.sh b/deploy/install.sh
index e93798c560..1d4905b6f6 100755
--- a/deploy/install.sh
+++ b/deploy/install.sh
@@ -534,7 +534,7 @@ else
     echo ""
     echo -e "🟢 Your frontend is running on http://localhost:3301"
     echo ""
-    echo "ℹ️  By default, retention period is set to 7 days for logs and traces, and 30 days for metrics."
+    echo "ℹ️  By default, retention period is set to 15 days for logs and traces, and 30 days for metrics."
     echo -e "To change this, navigate to the General tab on the Settings page of SigNoz UI. For more details, refer to https://signoz.io/docs/userguide/retention-period \n"

     echo "ℹ️  To bring down SigNoz and clean volumes : $sudo_cmd docker-compose -f ./docker/clickhouse-setup/docker-compose.yaml down -v"

From 7a18bddce33e9f55179e3be3e285e7c8c10353cf Mon Sep 17 00:00:00 2001
From: Prashant Shahi
Date: Fri, 20 Oct 2023 14:26:29 +0545
Subject: [PATCH 22/23] =?UTF-8?q?chore(release):=20=F0=9F=93=8C=20pin=20ve?=
 =?UTF-8?q?rsions:=20SigNoz=20OtelCollector=200.79.11?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: Prashant Shahi
---
 deploy/docker-swarm/clickhouse-setup/docker-compose.yaml | 6 +++---
 deploy/docker/clickhouse-setup/docker-compose-core.yaml  | 6 +++---
 deploy/docker/clickhouse-setup/docker-compose.yaml       | 6 +++---
 pkg/query-service/tests/test-deploy/docker-compose.yaml  | 6 +++---
 4 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
index 24fc87f216..929a3c47d6 100644
--- a/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
+++ b/deploy/docker-swarm/clickhouse-setup/docker-compose.yaml
@@ -199,7 +199,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

   otel-collector:
-    image: signoz/signoz-otel-collector:0.79.10
+    image: signoz/signoz-otel-collector:0.79.11
     command:
       [
         "--config=/etc/otel-collector-config.yaml",
@@ -237,7 +237,7 @@ services:
       - query-service

   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:0.79.10
+    image: signoz/signoz-schema-migrator:0.79.11
     deploy:
       restart_policy:
         condition: on-failure
@@ -250,7 +250,7 @@ services:
       # - clickhouse-3

   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:0.79.10
+    image: signoz/signoz-otel-collector:0.79.11
     command:
       [
         "--config=/etc/otel-collector-metrics-config.yaml",
diff --git a/deploy/docker/clickhouse-setup/docker-compose-core.yaml b/deploy/docker/clickhouse-setup/docker-compose-core.yaml
index 38f6e32b24..e259b78df7 100644
--- a/deploy/docker/clickhouse-setup/docker-compose-core.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose-core.yaml
@@ -66,7 +66,7 @@ services:
       - --storage.path=/data

   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.79.10}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.79.11}
     container_name: otel-migrator
     command:
       - "--dsn=tcp://clickhouse:9000"
@@ -81,7 +81,7 @@ services:
   # Notes for Maintainers/Contributors who will change Line Numbers of Frontend & Query-Section. Please Update Line Numbers in `./scripts/commentLinesForSetup.sh` & `./CONTRIBUTING.md`
   otel-collector:
     container_name: signoz-otel-collector
-    image: signoz/signoz-otel-collector:0.79.10
+    image: signoz/signoz-otel-collector:0.79.11
     command:
       [
         "--config=/etc/otel-collector-config.yaml",
@@ -118,7 +118,7 @@ services:

   otel-collector-metrics:
     container_name: signoz-otel-collector-metrics
-    image: signoz/signoz-otel-collector:0.79.10
+    image: signoz/signoz-otel-collector:0.79.11
     command:
       [
         "--config=/etc/otel-collector-metrics-config.yaml",
diff --git a/deploy/docker/clickhouse-setup/docker-compose.yaml b/deploy/docker/clickhouse-setup/docker-compose.yaml
index 94b8faf490..585ed92be8 100644
--- a/deploy/docker/clickhouse-setup/docker-compose.yaml
+++ b/deploy/docker/clickhouse-setup/docker-compose.yaml
@@ -215,7 +215,7 @@ services:
       - ../common/nginx-config.conf:/etc/nginx/conf.d/default.conf

   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.79.10}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.79.11}
     container_name: otel-migrator
     command:
       - "--dsn=tcp://clickhouse:9000"
@@ -229,7 +229,7 @@ services:

   otel-collector:
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.10}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.11}
     container_name: signoz-otel-collector
     command:
       [
@@ -269,7 +269,7 @@ services:

   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.10}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-0.79.11}
     container_name: signoz-otel-collector-metrics
     command:
       [
diff --git a/pkg/query-service/tests/test-deploy/docker-compose.yaml b/pkg/query-service/tests/test-deploy/docker-compose.yaml
index f44833ef47..5ccded7ec3 100644
--- a/pkg/query-service/tests/test-deploy/docker-compose.yaml
+++ b/pkg/query-service/tests/test-deploy/docker-compose.yaml
@@ -192,7 +192,7 @@ services:
     <<: *db-depend

   otel-collector-migrator:
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.79.10}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-0.79.11}
     container_name: otel-migrator
     command:
       - "--dsn=tcp://clickhouse:9000"
@@ -205,7 +205,7 @@ services:
       #   condition: service_healthy

   otel-collector:
-    image: signoz/signoz-otel-collector:0.79.10
+    image: signoz/signoz-otel-collector:0.79.11
     container_name: signoz-otel-collector
     command:
       [
@@ -245,7 +245,7 @@ services:
         condition: service_healthy

   otel-collector-metrics:
-    image: signoz/signoz-otel-collector:0.79.10
+    image: signoz/signoz-otel-collector:0.79.11
     container_name: signoz-otel-collector-metrics
     command:
       [

From 5e0c068cb92fb381a32f6d1371968a3ca0d06169 Mon Sep 17 00:00:00 2001
From: Palash Gupta
Date: Fri, 20 Oct 2023 14:14:37 +0530
Subject: [PATCH 23/23] fix: warning is now limited when timeseries panel is
 in widget (#3779)

---
 frontend/src/container/GridCardLayout/GridCard/index.tsx        | 4 ++--
 .../NewWidget/LeftContainer/WidgetGraph/WidgetGraph.tsx          | 3 ++-
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/frontend/src/container/GridCardLayout/GridCard/index.tsx b/frontend/src/container/GridCardLayout/GridCard/index.tsx
index 598f4dd708..7ebcc65e5c 100644
--- a/frontend/src/container/GridCardLayout/GridCard/index.tsx
+++ b/frontend/src/container/GridCardLayout/GridCard/index.tsx
@@ -92,9 +92,9 @@ function GridCardGraph({
 				},
 			],
 			createDataset: undefined,
-			isWarningLimit: true,
+			isWarningLimit: widget.panelTypes === PANEL_TYPES.TIME_SERIES,
 		}),
-		[queryResponse],
+		[queryResponse, widget?.panelTypes],
 	);

 	const isEmptyLayout = widget?.id === PANEL_TYPES.EMPTY_WIDGET;
diff --git a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraph.tsx b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraph.tsx
index c3edb0a9f1..fd46163807 100644
--- a/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraph.tsx
+++ b/frontend/src/container/NewWidget/LeftContainer/WidgetGraph/WidgetGraph.tsx
@@ -1,6 +1,7 @@
 import { WarningOutlined } from '@ant-design/icons';
 import { Card, Tooltip, Typography } from 'antd';
 import Spinner from 'components/Spinner';
+import { PANEL_TYPES } from 'constants/queryBuilder';
 import {
 	errorTooltipPosition,
 	tooltipStyles,
@@ -67,7 +68,7 @@ function WidgetGraph({
 			{ queryData: getWidgetQueryRange.data?.payload.data.result ?? [] },
 		],
 		createDataset: undefined,
-		isWarningLimit: true,
+		isWarningLimit: selectedWidget.panelTypes === PANEL_TYPES.TIME_SERIES,
 	});

 	return (