Mirror of https://git.mirrors.martin98.com/https://github.com/SigNoz/signoz (synced 2025-08-12 02:29:03 +08:00)
feat(ruler): update the ruler and planned maintenance tables (#7535)
* feat(ruler): base setup for rules and planned maintenance tables
* feat(ruler): more changes for making ruler org aware
* feat(ruler): fix lint
* feat(ruler): update the edit planned maintenance function
* feat(ruler): local testing edits for planned maintenance
* feat(ruler): abstract store and types from rules pkg
* feat(ruler): abstract store and types from rules pkg
* feat(ruler): abstract out store and add migration
* feat(ruler): frontend changes and review comments
* feat(ruler): add back compareAndSelectConfig
* feat(ruler): changes for alertmanager matchers
* feat(ruler): addressed review comments
* feat(ruler): remove the cascade operations from rules table
* feat(ruler): update the template for alertmanager
* feat(ruler): implement the rule history changes
* feat(ruler): implement the rule history changes
* feat(ruler): implement the rule history changes

---------

Co-authored-by: Vibhu Pandey <vibhupandey28@gmail.com>
This commit is contained in:
parent 2fa944d254
commit 16e0fa2eef
@@ -428,11 +428,11 @@ func (s *Server) initListeners() error {
}

// Start listening on http and private http port concurrently
func (s *Server) Start() error {
func (s *Server) Start(ctx context.Context) error {

// initiate rule manager first
if !s.serverOptions.DisableRules {
s.ruleManager.Start()
s.ruleManager.Start(ctx)
} else {
zap.L().Info("msg: Rules disabled as rules.disable is set to TRUE")
}

@@ -516,7 +516,7 @@ func (s *Server) Stop() error {
s.opampServer.Stop()

if s.ruleManager != nil {
s.ruleManager.Stop()
s.ruleManager.Stop(context.Background())
}

// stop usage manager

@@ -143,7 +143,7 @@ func main() {
zap.L().Fatal("Failed to create server", zap.Error(err))
}

if err := server.Start(); err != nil {
if err := server.Start(context.Background()); err != nil {
zap.L().Fatal("Could not start server", zap.Error(err))
}
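The hunks above thread a context.Context through server startup and shutdown: main now calls server.Start(context.Background()), and the rule manager is started and stopped with a context. A minimal sketch of the resulting calling pattern, using a stand-in interface rather than the real server type (illustration only, not part of this commit):

package main

import (
	"context"
	"log"
)

// lifecycle is a stand-in for the context-aware Start/Stop shape this diff
// moves the server and rule manager to.
type lifecycle interface {
	Start(ctx context.Context) error
	Stop(ctx context.Context) error
}

func run(ctx context.Context, srv lifecycle) {
	if err := srv.Start(ctx); err != nil {
		log.Fatalf("could not start server: %v", err)
	}
	// ... wait for a shutdown signal ...
	if err := srv.Stop(ctx); err != nil {
		log.Fatalf("failed to stop server: %v", err)
	}
}

func main() {
	_ = run // a real caller would pass the concrete server and a signal-aware context
}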
@@ -15,6 +15,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/cache"
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"

querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"

@@ -52,7 +53,7 @@ type AnomalyRule struct {

func NewAnomalyRule(
id string,
p *baserules.PostableRule,
p *ruletypes.PostableRule,
reader interfaces.Reader,
cache cache.Cache,
opts ...baserules.RuleOption,

@@ -60,7 +61,7 @@ func NewAnomalyRule(

zap.L().Info("creating new AnomalyRule", zap.String("id", id), zap.Any("opts", opts))

if p.RuleCondition.CompareOp == baserules.ValueIsBelow {
if p.RuleCondition.CompareOp == ruletypes.ValueIsBelow {
target := -1 * *p.RuleCondition.Target
p.RuleCondition.Target = &target
}

@@ -117,7 +118,7 @@ func NewAnomalyRule(
return &t, nil
}

func (r *AnomalyRule) Type() baserules.RuleType {
func (r *AnomalyRule) Type() ruletypes.RuleType {
return RuleTypeAnomaly
}

@@ -157,7 +158,7 @@ func (r *AnomalyRule) GetSelectedQuery() string {
return r.Condition().GetSelectedQueryName()
}

func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, ts time.Time) (baserules.Vector, error) {
func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletypes.Vector, error) {

params, err := r.prepareQueryRange(ts)
if err != nil {

@@ -184,7 +185,7 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, ts time.Time) (baser
}
}

var resultVector baserules.Vector
var resultVector ruletypes.Vector

scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
zap.L().Info("anomaly scores", zap.String("scores", string(scoresJSON)))

@@ -213,7 +214,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
defer r.mtx.Unlock()

resultFPs := map[uint64]struct{}{}
var alerts = make(map[uint64]*baserules.Alert, len(res))
var alerts = make(map[uint64]*ruletypes.Alert, len(res))

for _, smpl := range res {
l := make(map[string]string, len(smpl.Metric))

@@ -225,7 +226,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
threshold := valueFormatter.Format(r.TargetVal(), r.Unit())
zap.L().Debug("Alert template data for rule", zap.String("name", r.Name()), zap.String("formatter", valueFormatter.Name()), zap.String("value", value), zap.String("threshold", threshold))

tmplData := baserules.AlertTemplateData(l, value, threshold)
tmplData := ruletypes.AlertTemplateData(l, value, threshold)
// Inject some convenience variables that are easier to remember for users
// who are not used to Go's templating system.
defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}"

@@ -233,7 +234,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
// utility function to apply go template on labels and annotations
expand := func(text string) string {

tmpl := baserules.NewTemplateExpander(
tmpl := ruletypes.NewTemplateExpander(
ctx,
defs+text,
"__alert_"+r.Name(),

@@ -278,7 +279,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
return nil, err
}

alerts[h] = &baserules.Alert{
alerts[h] = &ruletypes.Alert{
Labels: lbs,
QueryResultLables: resultLabels,
Annotations: annotations,

@@ -319,7 +320,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
if _, ok := resultFPs[fp]; !ok {
// If the alert was previously firing, keep it around for a given
// retention time so it is reported as resolved to the AlertManager.
if a.State == model.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > baserules.ResolvedRetention) {
if a.State == model.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > ruletypes.ResolvedRetention) {
delete(r.Active, fp)
}
if a.State != model.StateInactive {

@@ -375,10 +376,10 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro

func (r *AnomalyRule) String() string {

ar := baserules.PostableRule{
ar := ruletypes.PostableRule{
AlertName: r.Name(),
RuleCondition: r.Condition(),
EvalWindow: baserules.Duration(r.EvalWindow()),
EvalWindow: ruletypes.Duration(r.EvalWindow()),
Labels: r.Labels().Map(),
Annotations: r.Annotations().Map(),
PreferredChannels: r.PreferredChannels(),
@@ -8,6 +8,7 @@ import (
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/google/uuid"
"go.uber.org/zap"
)

@@ -18,7 +19,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
var task baserules.Task

ruleId := baserules.RuleIdFromTaskName(opts.TaskName)
if opts.Rule.RuleType == baserules.RuleTypeThreshold {
if opts.Rule.RuleType == ruletypes.RuleTypeThreshold {
// create a threshold rule
tr, err := baserules.NewThresholdRule(
ruleId,

@@ -37,9 +38,9 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, tr)

// create ch rule task for evalution
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

} else if opts.Rule.RuleType == baserules.RuleTypeProm {
} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {

// create promql rule
pr, err := baserules.NewPromRule(

@@ -58,9 +59,9 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, pr)

// create promql rule task for evalution
task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)
task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

} else if opts.Rule.RuleType == baserules.RuleTypeAnomaly {
} else if opts.Rule.RuleType == ruletypes.RuleTypeAnomaly {
// create anomaly rule
ar, err := NewAnomalyRule(
ruleId,

@@ -77,10 +78,10 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules = append(rules, ar)

// create anomaly rule task for evalution
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

} else {
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, baserules.RuleTypeProm, baserules.RuleTypeThreshold)
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
}

return task, nil

@@ -105,12 +106,12 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
}

// append name to indicate this is test alert
parsedRule.AlertName = fmt.Sprintf("%s%s", alertname, baserules.TestAlertPostFix)
parsedRule.AlertName = fmt.Sprintf("%s%s", alertname, ruletypes.TestAlertPostFix)

var rule baserules.Rule
var err error

if parsedRule.RuleType == baserules.RuleTypeThreshold {
if parsedRule.RuleType == ruletypes.RuleTypeThreshold {

// add special labels for test alerts
parsedRule.Annotations[labels.AlertSummaryLabel] = fmt.Sprintf("The rule threshold is set to %.4f, and the observed metric value is {{$value}}.", *parsedRule.RuleCondition.Target)

@@ -134,7 +135,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
return 0, basemodel.BadRequest(err)
}

} else if parsedRule.RuleType == baserules.RuleTypeProm {
} else if parsedRule.RuleType == ruletypes.RuleTypeProm {

// create promql rule
rule, err = baserules.NewPromRule(

@@ -152,7 +153,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", rule.Name()), zap.Error(err))
return 0, basemodel.BadRequest(err)
}
} else if parsedRule.RuleType == baserules.RuleTypeAnomaly {
} else if parsedRule.RuleType == ruletypes.RuleTypeAnomaly {
// create anomaly rule
rule, err = NewAnomalyRule(
alertname,

@@ -190,9 +191,9 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap

// newTask returns an appropriate group for
// rule type
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, ruleDB baserules.RuleDB) baserules.Task {
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID string) baserules.Task {
if taskType == baserules.TaskTypeCh {
return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, ruleDB)
return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID)
}
return baserules.NewPromRuleTask(name, "", frequency, rules, opts, notify, ruleDB)
return baserules.NewPromRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID)
}
@@ -1,3 +1,4 @@
import { isEmpty } from 'lodash-es';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { PayloadProps, Props } from 'types/api/alerts/save';

@@ -7,7 +8,7 @@ import put from './put';
const save = async (
props: Props,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
if (props.id && props.id > 0) {
if (props.id && !isEmpty(props.id)) {
return put({ ...props });
}

@@ -121,7 +121,7 @@ function CreateRules(): JSX.Element {
alertType={alertType}
formInstance={formInstance}
initialValue={initValues}
ruleId={0}
ruleId=""
/>
);
}

@@ -22,7 +22,7 @@ function EditRules({ initialValue, ruleId }: EditRulesProps): JSX.Element {

interface EditRulesProps {
initialValue: AlertDef;
ruleId: number;
ruleId: string;
}

export default EditRules;

@@ -11,6 +11,7 @@ import { QBShortcuts } from 'constants/shortcuts/QBShortcuts';
import { QueryBuilder } from 'container/QueryBuilder';
import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { isEmpty } from 'lodash-es';
import { Atom, Play, Terminal } from 'lucide-react';
import { useEffect, useMemo, useState } from 'react';
import { useTranslation } from 'react-i18next';

@@ -156,7 +157,7 @@ function QuerySection({
runQuery();
logEvent('Alert: Stage and run query', {
dataSource: ALERTS_DATA_SOURCE_MAP[alertType],
isNewRule: !ruleId || ruleId === 0,
isNewRule: !ruleId || isEmpty(ruleId),
ruleId,
queryType: queryCategory,
});

@@ -230,7 +231,7 @@ interface QuerySectionProps {
runQuery: VoidFunction;
alertDef: AlertDef;
panelType: PANEL_TYPES;
ruleId: number;
ruleId: string;
}

export default QuerySection;

@@ -21,7 +21,7 @@ import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
import { mapQueryDataToApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataToApi';
import { isEqual } from 'lodash-es';
import { isEmpty, isEqual } from 'lodash-es';
import { BellDot, ExternalLink } from 'lucide-react';
import Tabs2 from 'periscope/components/Tabs2';
import { useAppContext } from 'providers/App/App';

@@ -121,7 +121,7 @@ function FormAlertRules({
// use query client
const ruleCache = useQueryClient();

const isNewRule = ruleId === 0;
const isNewRule = !ruleId || isEmpty(ruleId);

const [loading, setLoading] = useState(false);
const [queryStatus, setQueryStatus] = useState<string>('');

@@ -481,7 +481,7 @@ function FormAlertRules({

try {
const apiReq =
ruleId && ruleId > 0
ruleId && !isEmpty(ruleId)
? { data: postableAlert, id: ruleId }
: { data: postableAlert };

@@ -491,7 +491,7 @@ function FormAlertRules({
logData = {
status: 'success',
statusMessage:
!ruleId || ruleId === 0 ? t('rule_created') : t('rule_edited'),
!ruleId || isEmpty(ruleId) ? t('rule_created') : t('rule_edited'),
};

notifications.success({

@@ -543,7 +543,7 @@ function FormAlertRules({
dataSource: ALERTS_DATA_SOURCE_MAP[postableAlert?.alertType as AlertTypes],
channelNames: postableAlert?.preferredChannels,
broadcastToAll: postableAlert?.broadcastToAll,
isNewRule: !ruleId || ruleId === 0,
isNewRule: !ruleId || isEmpty(ruleId),
ruleId,
queryType: currentQuery.queryType,
alertId: postableAlert?.id,

@@ -628,7 +628,7 @@ function FormAlertRules({
dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
channelNames: postableAlert?.preferredChannels,
broadcastToAll: postableAlert?.broadcastToAll,
isNewRule: !ruleId || ruleId === 0,
isNewRule: !ruleId || isEmpty(ruleId),
ruleId,
queryType: currentQuery.queryType,
status: statusResponse.status,

@@ -700,7 +700,7 @@ function FormAlertRules({
alertDef?.broadcastToAll ||
(alertDef.preferredChannels && alertDef.preferredChannels.length > 0);

const isRuleCreated = !ruleId || ruleId === 0;
const isRuleCreated = !ruleId || isEmpty(ruleId);

function handleRedirection(option: AlertTypes): void {
let url;

@@ -716,7 +716,7 @@ function FormAlertRules({
if (url) {
logEvent('Alert: Check example alert clicked', {
dataSource: ALERTS_DATA_SOURCE_MAP[alertDef?.alertType as AlertTypes],
isNewRule: !ruleId || ruleId === 0,
isNewRule: !ruleId || isEmpty(ruleId),
ruleId,
queryType: currentQuery.queryType,
link: url,

@@ -881,8 +881,8 @@ function FormAlertRules({
type="default"
onClick={onCancelHandler}
>
{ruleId === 0 && t('button_cancelchanges')}
{ruleId > 0 && t('button_discard')}
{(!ruleId || isEmpty(ruleId)) && t('button_cancelchanges')}
{ruleId && !isEmpty(ruleId) && t('button_discard')}
</ActionButton>
</ButtonContainer>
</MainFormContainer>

@@ -899,7 +899,7 @@ interface FormAlertRuleProps {
alertType?: AlertTypes;
formInstance: FormInstance;
initialValue: AlertDef;
ruleId: number;
ruleId: string;
}

export default FormAlertRules;
@@ -153,7 +153,9 @@ export default function AlertRules({
<div className="alert-rule-item-name-container home-data-item-name-container">
<img
src={
rule.id % 2 === 0 ? '/Icons/eight-ball.svg' : '/Icons/circus-tent.svg'
Math.random() % 2 === 0
? '/Icons/eight-ball.svg'
: '/Icons/circus-tent.svg'
}
alt="alert-rules"
className="alert-rules-img"

@@ -24,7 +24,7 @@ function DeleteAlert({

const defaultErrorMessage = 'Something went wrong';

const onDeleteHandler = async (id: number): Promise<void> => {
const onDeleteHandler = async (id: string): Promise<void> => {
try {
const response = await deleteAlerts({
id,

@@ -25,7 +25,7 @@ function ToggleAlertState({
const defaultErrorMessage = 'Something went wrong';

const onToggleHandler = async (
id: number,
id: string,
disabled: boolean,
): Promise<void> => {
try {

@@ -138,7 +138,7 @@ export const deleteDowntimeHandler = ({
export const createEditDowntimeSchedule = async (
props: DowntimeScheduleUpdatePayload,
): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
if (props.id && props.id > 0) {
if (props.id) {
return updateDowntimeSchedule({ ...props });
}
return createDowntimeSchedule({ ...props.data });

@@ -44,7 +44,7 @@ function AlertActionButtons({
const { handleAlertDuplicate } = useAlertRuleDuplicate({
alertDetails: (alertDetails as unknown) as AlertDef,
});
const { handleAlertDelete } = useAlertRuleDelete({ ruleId: Number(ruleId) });
const { handleAlertDelete } = useAlertRuleDelete({ ruleId });
const { handleAlertUpdate, isLoading } = useAlertRuleUpdate({
alertDetails: (alertDetails as unknown) as AlertDef,
setUpdatedName,

@@ -153,7 +153,7 @@ type Props = {
export const useGetAlertRuleDetails = (): Props => {
const { ruleId } = useAlertHistoryQueryParams();

const isValidRuleId = ruleId !== null && String(ruleId).length !== 0;
const isValidRuleId = ruleId !== null && ruleId !== '';

const {
isLoading,

@@ -163,7 +163,7 @@ export const useGetAlertRuleDetails = (): Props => {
} = useQuery([REACT_QUERY_KEY.ALERT_RULE_DETAILS, ruleId], {
queryFn: () =>
get({
id: parseInt(ruleId || '', 10),
id: ruleId || '',
}),
enabled: isValidRuleId,
refetchOnWindowFocus: false,

@@ -204,7 +204,7 @@ export const useGetAlertRuleDetailsStats = (): GetAlertRuleDetailsStatsProps =>
{
queryFn: () =>
ruleStats({
id: parseInt(ruleId || '', 10),
id: ruleId || '',
start: startTime,
end: endTime,
}),

@@ -234,7 +234,7 @@ export const useGetAlertRuleDetailsTopContributors = (): GetAlertRuleDetailsTopC
{
queryFn: () =>
topContributors({
id: parseInt(ruleId || '', 10),
id: ruleId || '',
start: startTime,
end: endTime,
}),

@@ -287,7 +287,7 @@ export const useGetAlertRuleDetailsTimelineTable = ({
{
queryFn: () =>
timelineTable({
id: parseInt(ruleId || '', 10),
id: ruleId || '',
start: startTime,
end: endTime,
limit: TIMELINE_TABLE_PAGE_SIZE,

@@ -410,7 +410,7 @@ export const useAlertRuleStatusToggle = ({

const handleAlertStateToggle = (): void => {
const args = {
id: parseInt(ruleId, 10),
id: ruleId,
data: { disabled: alertRuleState !== 'disabled' },
};
toggleAlertState(args);

@@ -512,7 +512,7 @@ export const useAlertRuleUpdate = ({
export const useAlertRuleDelete = ({
ruleId,
}: {
ruleId: number;
ruleId: string;
}): {
handleAlertDelete: () => void;
} => {

@@ -560,7 +560,7 @@ export const useGetAlertRuleDetailsTimelineGraphData = (): GetAlertRuleDetailsTi
{
queryFn: () =>
timelineGraph({
id: parseInt(ruleId || '', 10),
id: ruleId || '',
start: startTime,
end: endTime,
}),

@@ -34,7 +34,7 @@ function EditRules(): JSX.Element {
{
queryFn: () =>
get({
id: parseInt(ruleId || '', 10),
id: ruleId || '',
}),
enabled: isValidRuleId,
refetchOnMount: false,

@@ -90,10 +90,7 @@ function EditRules(): JSX.Element {

return (
<div className="edit-rules-container">
<EditRulesContainer
ruleId={parseInt(ruleId, 10)}
initialValue={data.payload.data}
/>
<EditRulesContainer ruleId={ruleId || ''} initialValue={data.payload.data} />
</div>
);
}

@@ -17,7 +17,7 @@ export const defaultAlgorithm = 'standard';
export const defaultSeasonality = 'hourly';

export interface AlertDef {
id?: number;
id?: string;
alertType?: string;
alert: string;
ruleType?: string;

@@ -5,7 +5,7 @@ export interface Props {
}

export interface GettableAlert extends AlertDef {
id: number;
id: string;
alert: string;
state: string;
disabled: boolean;

@@ -7,6 +7,6 @@ export interface PatchProps {
}

export interface Props {
id?: number;
id?: string;
data: PatchProps;
}

@@ -6,6 +6,6 @@ export type PayloadProps = {
};

export interface Props {
id?: number;
id?: string;
data: AlertDef;
}
@@ -3,7 +3,6 @@ package sqlalertmanagerstore
import (
"context"
"database/sql"
"strconv"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/sqlstore"

@@ -191,9 +190,9 @@ func (store *config) ListAllChannels(ctx context.Context) ([]*alertmanagertypes.

func (store *config) GetMatchers(ctx context.Context, orgID string) (map[string][]string, error) {
type matcher struct {
bun.BaseModel `bun:"table:rules"`
ID int `bun:"id,pk"`
Data string `bun:"data"`
bun.BaseModel `bun:"table:rule"`
ID valuer.UUID `bun:"id,pk"`
Data string `bun:"data"`
}

matchers := []matcher{}

@@ -213,7 +212,7 @@ func (store *config) GetMatchers(ctx context.Context, orgID string) (map[string]
for _, matcher := range matchers {
receivers := gjson.Get(matcher.Data, "preferredChannels").Array()
for _, receiver := range receivers {
matchersMap[strconv.Itoa(matcher.ID)] = append(matchersMap[strconv.Itoa(matcher.ID)], receiver.String())
matchersMap[matcher.ID.StringValue()] = append(matchersMap[matcher.ID.StringValue()], receiver.String())
}
}

@@ -160,6 +160,17 @@ func (service *Service) newServer(ctx context.Context, orgID string) (*alertmana
return nil, err
}

beforeCompareAndSelectHash := config.StoreableConfig().Hash
config, err = service.compareAndSelectConfig(ctx, config)
if err != nil {
return nil, err
}

if beforeCompareAndSelectHash == config.StoreableConfig().Hash {
service.settings.Logger().Debug("skipping config store update for org", "orgID", orgID, "hash", config.StoreableConfig().Hash)
return server, nil
}

err = service.configStore.Set(ctx, config)
if err != nil {
return nil, err

@@ -191,6 +202,38 @@ func (service *Service) getConfig(ctx context.Context, orgID string) (*alertmana
return config, nil
}

func (service *Service) compareAndSelectConfig(ctx context.Context, incomingConfig *alertmanagertypes.Config) (*alertmanagertypes.Config, error) {
channels, err := service.configStore.ListChannels(ctx, incomingConfig.StoreableConfig().OrgID)
if err != nil {
return nil, err
}

matchers, err := service.configStore.GetMatchers(ctx, incomingConfig.StoreableConfig().OrgID)
if err != nil {
return nil, err
}

config, err := alertmanagertypes.NewConfigFromChannels(service.config.Global, service.config.Route, channels, incomingConfig.StoreableConfig().OrgID)
if err != nil {
return nil, err
}

for ruleID, receivers := range matchers {
err = config.CreateRuleIDMatcher(ruleID, receivers)
if err != nil {
return nil, err
}
}

if incomingConfig.StoreableConfig().Hash != config.StoreableConfig().Hash {
service.settings.Logger().InfoContext(ctx, "mismatch found, updating config to match channels and matchers")
return config, nil
}

return incomingConfig, nil

}

// getServer returns the server for the given orgID. It should be called with the lock held.
func (service *Service) getServer(orgID string) (*alertmanagerserver.Server, error) {
server, ok := service.servers[orgID]
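In the alertmanager service hunks above, newServer snapshots the config hash before compareAndSelectConfig rebuilds the config from the stored channels and rule matchers, and it skips the config-store write when the hash is unchanged. A minimal sketch of that reconcile-then-skip-write idea, with stand-in types instead of the real alertmanagertypes.Config (illustration only, not part of this commit):

package main

import "fmt"

// storedConfig stands in for alertmanagertypes.Config; only the hash matters here.
type storedConfig struct{ hash string }

// reconcile returns the config to keep and whether it still needs to be persisted.
func reconcile(current, rebuilt storedConfig) (storedConfig, bool) {
	if current.hash == rebuilt.hash {
		// nothing changed: keep the current config and skip the store update
		return current, false
	}
	// mismatch found: prefer the config rebuilt from channels and matchers
	return rebuilt, true
}

func main() {
	cfg, dirty := reconcile(storedConfig{hash: "abc"}, storedConfig{hash: "def"})
	fmt.Println(cfg.hash, dirty) // def true
}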
@@ -59,6 +59,7 @@ import (
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"

"go.uber.org/zap"

@@ -734,9 +735,15 @@ func (aH *APIHandler) PopulateTemporality(ctx context.Context, qp *v3.QueryRange
}

func (aH *APIHandler) listDowntimeSchedules(w http.ResponseWriter, r *http.Request) {
schedules, err := aH.ruleManager.RuleDB().GetAllPlannedMaintenance(r.Context())
claims, ok := authtypes.ClaimsFromContext(r.Context())
if !ok {
render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
return
}

schedules, err := aH.ruleManager.MaintenanceStore().GetAllPlannedMaintenance(r.Context(), claims.OrgID)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
render.Error(w, err)
return
}

@@ -744,7 +751,7 @@ func (aH *APIHandler) listDowntimeSchedules(w http.ResponseWriter, r *http.Reque
// Since the number of schedules is expected to be small, this should be fine

if r.URL.Query().Get("active") != "" {
activeSchedules := make([]rules.PlannedMaintenance, 0)
activeSchedules := make([]*ruletypes.GettablePlannedMaintenance, 0)
active, _ := strconv.ParseBool(r.URL.Query().Get("active"))
for _, schedule := range schedules {
now := time.Now().In(time.FixedZone(schedule.Schedule.Timezone, 0))

@@ -756,7 +763,7 @@ func (aH *APIHandler) listDowntimeSchedules(w http.ResponseWriter, r *http.Reque
}

if r.URL.Query().Get("recurring") != "" {
recurringSchedules := make([]rules.PlannedMaintenance, 0)
recurringSchedules := make([]*ruletypes.GettablePlannedMaintenance, 0)
recurring, _ := strconv.ParseBool(r.URL.Query().Get("recurring"))
for _, schedule := range schedules {
if schedule.IsRecurring() == recurring {

@@ -770,62 +777,83 @@ func (aH *APIHandler) listDowntimeSchedules(w http.ResponseWriter, r *http.Reque
}

func (aH *APIHandler) getDowntimeSchedule(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
schedule, err := aH.ruleManager.RuleDB().GetPlannedMaintenanceByID(r.Context(), id)
idStr := mux.Vars(r)["id"]
id, err := valuer.NewUUID(idStr)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error()))
return
}

schedule, err := aH.ruleManager.MaintenanceStore().GetPlannedMaintenanceByID(r.Context(), id)
if err != nil {
render.Error(w, err)
return
}
aH.Respond(w, schedule)
}

func (aH *APIHandler) createDowntimeSchedule(w http.ResponseWriter, r *http.Request) {
var schedule rules.PlannedMaintenance
var schedule ruletypes.GettablePlannedMaintenance
err := json.NewDecoder(r.Body).Decode(&schedule)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
return
}
if err := schedule.Validate(); err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
render.Error(w, err)
return
}

_, err = aH.ruleManager.RuleDB().CreatePlannedMaintenance(r.Context(), schedule)
_, err = aH.ruleManager.MaintenanceStore().CreatePlannedMaintenance(r.Context(), schedule)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
render.Error(w, err)
return
}
aH.Respond(w, nil)
}

func (aH *APIHandler) editDowntimeSchedule(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
var schedule rules.PlannedMaintenance
err := json.NewDecoder(r.Body).Decode(&schedule)
idStr := mux.Vars(r)["id"]
id, err := valuer.NewUUID(idStr)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error()))
return
}

var schedule ruletypes.GettablePlannedMaintenance
err = json.NewDecoder(r.Body).Decode(&schedule)
if err != nil {
render.Error(w, err)
return
}
if err := schedule.Validate(); err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorBadData, Err: err}, nil)
render.Error(w, err)
return
}
_, err = aH.ruleManager.RuleDB().EditPlannedMaintenance(r.Context(), schedule, id)

err = aH.ruleManager.MaintenanceStore().EditPlannedMaintenance(r.Context(), schedule, id)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
render.Error(w, err)
return
}

aH.Respond(w, nil)
}

func (aH *APIHandler) deleteDowntimeSchedule(w http.ResponseWriter, r *http.Request) {
id := mux.Vars(r)["id"]
_, err := aH.ruleManager.RuleDB().DeletePlannedMaintenance(r.Context(), id)
idStr := mux.Vars(r)["id"]
id, err := valuer.NewUUID(idStr)
if err != nil {
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: err}, nil)
render.Error(w, errorsV2.Newf(errorsV2.TypeInvalidInput, errorsV2.CodeInvalidInput, err.Error()))
return
}

err = aH.ruleManager.MaintenanceStore().DeletePlannedMaintenance(r.Context(), id)
if err != nil {
render.Error(w, err)
return
}

aH.Respond(w, nil)
}
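The downtime-schedule handlers above now resolve the org from the request claims and go through ruleManager.MaintenanceStore() instead of RuleDB(), with UUID-typed identifiers. The store's shape, as implied by the call sites in this diff, is roughly the following sketch; the exact definition lives in pkg/types/ruletypes, and the stand-in types and return values noted below are assumptions:

package ruletypessketch

import "context"

// Stand-ins for valuer.UUID and ruletypes.GettablePlannedMaintenance.
type UUID string
type GettablePlannedMaintenance struct{}

// MaintenanceStore approximates the org-aware planned-maintenance store used by
// the handlers in this diff. Signatures are inferred from the call sites; the
// create call's first return value is discarded by the handler, so its type
// here is an assumption.
type MaintenanceStore interface {
	CreatePlannedMaintenance(ctx context.Context, schedule GettablePlannedMaintenance) (UUID, error)
	GetAllPlannedMaintenance(ctx context.Context, orgID string) ([]*GettablePlannedMaintenance, error)
	GetPlannedMaintenanceByID(ctx context.Context, id UUID) (*GettablePlannedMaintenance, error)
	EditPlannedMaintenance(ctx context.Context, schedule GettablePlannedMaintenance, id UUID) error
	DeletePlannedMaintenance(ctx context.Context, id UUID) error
}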
@@ -929,12 +957,12 @@ func (aH *APIHandler) getOverallStateTransitions(w http.ResponseWriter, r *http.
aH.Respond(w, stateItems)
}

func (aH *APIHandler) metaForLinks(ctx context.Context, rule *rules.GettableRule) ([]v3.FilterItem, []v3.AttributeKey, map[string]v3.AttributeKey) {
func (aH *APIHandler) metaForLinks(ctx context.Context, rule *ruletypes.GettableRule) ([]v3.FilterItem, []v3.AttributeKey, map[string]v3.AttributeKey) {
filterItems := []v3.FilterItem{}
groupBy := []v3.AttributeKey{}
keys := make(map[string]v3.AttributeKey)

if rule.AlertType == rules.AlertTypeLogs {
if rule.AlertType == ruletypes.AlertTypeLogs {
logFields, err := aH.reader.GetLogFields(ctx)
if err == nil {
params := &v3.QueryRangeParamsV3{

@@ -944,7 +972,7 @@ func (aH *APIHandler) metaForLinks(ctx context.Context, rule *rules.GettableRule
} else {
zap.L().Error("failed to get log fields using empty keys; the link might not work as expected", zap.Error(err))
}
} else if rule.AlertType == rules.AlertTypeTraces {
} else if rule.AlertType == ruletypes.AlertTypeTraces {
traceFields, err := aH.reader.GetSpanAttributeKeys(ctx)
if err == nil {
keys = traceFields

@@ -953,7 +981,7 @@ func (aH *APIHandler) metaForLinks(ctx context.Context, rule *rules.GettableRule
}
}

if rule.AlertType == rules.AlertTypeLogs || rule.AlertType == rules.AlertTypeTraces {
if rule.AlertType == ruletypes.AlertTypeLogs || rule.AlertType == ruletypes.AlertTypeTraces {
if rule.RuleCondition.CompositeQuery != nil {
if rule.RuleCondition.QueryType() == v3.QueryTypeBuilder {
selectedQuery := rule.RuleCondition.GetSelectedQueryName()

@@ -1006,9 +1034,9 @@ func (aH *APIHandler) getRuleStateHistory(w http.ResponseWriter, r *http.Request
// alerts have 2 minutes delay built in, so we need to subtract that from the start time
// to get the correct query range
start := end.Add(-time.Duration(rule.EvalWindow)).Add(-3 * time.Minute)
if rule.AlertType == rules.AlertTypeLogs {
if rule.AlertType == ruletypes.AlertTypeLogs {
res.Items[idx].RelatedLogsLink = contextlinks.PrepareLinksToLogs(start, end, newFilters)
} else if rule.AlertType == rules.AlertTypeTraces {
} else if rule.AlertType == ruletypes.AlertTypeTraces {
res.Items[idx].RelatedTracesLink = contextlinks.PrepareLinksToTraces(start, end, newFilters)
}
}

@@ -1044,9 +1072,9 @@ func (aH *APIHandler) getRuleStateHistoryTopContributors(w http.ResponseWriter,
newFilters := contextlinks.PrepareFilters(lbls, filterItems, groupBy, keys)
end := time.Unix(params.End/1000, 0)
start := time.Unix(params.Start/1000, 0)
if rule.AlertType == rules.AlertTypeLogs {
if rule.AlertType == ruletypes.AlertTypeLogs {
res[idx].RelatedLogsLink = contextlinks.PrepareLinksToLogs(start, end, newFilters)
} else if rule.AlertType == rules.AlertTypeTraces {
} else if rule.AlertType == ruletypes.AlertTypeTraces {
res[idx].RelatedTracesLink = contextlinks.PrepareLinksToTraces(start, end, newFilters)
}
}
@@ -7,11 +7,11 @@ import (
"strings"

"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)

@@ -34,7 +34,7 @@ type IntegrationAssets struct {
Logs LogsAssets `json:"logs"`
Dashboards []types.DashboardData `json:"dashboards"`

Alerts []rules.PostableRule `json:"alerts"`
Alerts []ruletypes.PostableRule `json:"alerts"`
}

type LogsAssets struct {

@@ -10,10 +10,10 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/dao"
"github.com/SigNoz/signoz/pkg/query-service/model"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/google/uuid"
)

@@ -129,7 +129,7 @@ func (t *TestAvailableIntegrationsRepo) list(
},
},
Dashboards: []types.DashboardData{},
Alerts: []rules.PostableRule{},
Alerts: []ruletypes.PostableRule{},
},
ConnectionTests: &IntegrationConnectionTests{
Logs: &LogsConnectionTest{

@@ -197,7 +197,7 @@ func (t *TestAvailableIntegrationsRepo) list(
},
},
Dashboards: []types.DashboardData{},
Alerts: []rules.PostableRule{},
Alerts: []ruletypes.PostableRule{},
},
ConnectionTests: &IntegrationConnectionTests{
Logs: &LogsConnectionTest{
@@ -382,11 +382,11 @@ func (s *Server) initListeners() error {
}

// Start listening on http and private http port concurrently
func (s *Server) Start() error {
func (s *Server) Start(ctx context.Context) error {

// initiate rule manager first
if !s.serverOptions.DisableRules {
s.ruleManager.Start()
s.ruleManager.Start(ctx)
} else {
zap.L().Info("msg: Rules disabled as rules.disable is set to TRUE")
}

@@ -454,7 +454,7 @@ func (s *Server) Start() error {
return nil
}

func (s *Server) Stop() error {
func (s *Server) Stop(ctx context.Context) error {
if s.httpServer != nil {
if err := s.httpServer.Shutdown(context.Background()); err != nil {
return err

@@ -470,7 +470,7 @@ func (s *Server) Stop() error {
s.opampServer.Stop()

if s.ruleManager != nil {
s.ruleManager.Stop()
s.ruleManager.Stop(ctx)
}

return nil

@@ -135,7 +135,7 @@ func main() {
logger.Fatal("Failed to create server", zap.Error(err))
}

if err := server.Start(); err != nil {
if err := server.Start(context.Background()); err != nil {
logger.Fatal("Could not start servers", zap.Error(err))
}

@@ -149,7 +149,7 @@ func main() {
zap.L().Fatal("Failed to start signoz", zap.Error(err))
}

err = server.Stop()
err = server.Stop(context.Background())
if err != nil {
zap.L().Fatal("Failed to stop server", zap.Error(err))
}
@@ -14,6 +14,7 @@ import (
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/sqlstore"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"go.uber.org/zap"
)

@@ -25,9 +26,9 @@ type BaseRule struct {
handledRestart bool

// Type of the rule
typ AlertType
typ ruletypes.AlertType

ruleCondition *RuleCondition
ruleCondition *ruletypes.RuleCondition
// evalWindow is the time window used for evaluating the rule
// i.e each time we lookback from the current time, we look at data for the last
// evalWindow duration

@@ -52,9 +53,9 @@ type BaseRule struct {
// the timestamp of the last evaluation
evaluationTimestamp time.Time

health RuleHealth
health ruletypes.RuleHealth
lastError error
Active map[uint64]*Alert
Active map[uint64]*ruletypes.Alert

// lastTimestampWithDatapoints is the timestamp of the last datapoint we observed
// for this rule

@@ -115,7 +116,7 @@ func WithSQLStore(sqlstore sqlstore.SQLStore) RuleOption {
}
}

func NewBaseRule(id string, p *PostableRule, reader interfaces.Reader, opts ...RuleOption) (*BaseRule, error) {
func NewBaseRule(id string, p *ruletypes.PostableRule, reader interfaces.Reader, opts ...RuleOption) (*BaseRule, error) {
if p.RuleCondition == nil || !p.RuleCondition.IsValid() {
return nil, fmt.Errorf("invalid rule condition")
}

@@ -130,8 +131,8 @@ func NewBaseRule(id string, p *PostableRule, reader interfaces.Reader, opts ...R
labels: qslabels.FromMap(p.Labels),
annotations: qslabels.FromMap(p.Annotations),
preferredChannels: p.PreferredChannels,
health: HealthUnknown,
Active: map[uint64]*Alert{},
health: ruletypes.HealthUnknown,
Active: map[uint64]*ruletypes.Alert{},
reader: reader,
TemporalityMap: make(map[string]map[v3.Temporality]bool),
}

@@ -163,25 +164,25 @@ func (r *BaseRule) targetVal() float64 {
return value.F
}

func (r *BaseRule) matchType() MatchType {
func (r *BaseRule) matchType() ruletypes.MatchType {
if r.ruleCondition == nil {
return AtleastOnce
return ruletypes.AtleastOnce
}
return r.ruleCondition.MatchType
}

func (r *BaseRule) compareOp() CompareOp {
func (r *BaseRule) compareOp() ruletypes.CompareOp {
if r.ruleCondition == nil {
return ValueIsEq
return ruletypes.ValueIsEq
}
return r.ruleCondition.CompareOp
}

func (r *BaseRule) currentAlerts() []*Alert {
func (r *BaseRule) currentAlerts() []*ruletypes.Alert {
r.mtx.Lock()
defer r.mtx.Unlock()

alerts := make([]*Alert, 0, len(r.Active))
alerts := make([]*ruletypes.Alert, 0, len(r.Active))
for _, a := range r.Active {
anew := *a
alerts = append(alerts, &anew)

@@ -216,15 +217,15 @@ func (r *ThresholdRule) hostFromSource() string {
return fmt.Sprintf("%s://%s", parsedUrl.Scheme, parsedUrl.Hostname())
}

func (r *BaseRule) ID() string { return r.id }
func (r *BaseRule) Name() string { return r.name }
func (r *BaseRule) Condition() *RuleCondition { return r.ruleCondition }
func (r *BaseRule) Labels() qslabels.BaseLabels { return r.labels }
func (r *BaseRule) Annotations() qslabels.BaseLabels { return r.annotations }
func (r *BaseRule) PreferredChannels() []string { return r.preferredChannels }
func (r *BaseRule) ID() string { return r.id }
func (r *BaseRule) Name() string { return r.name }
func (r *BaseRule) Condition() *ruletypes.RuleCondition { return r.ruleCondition }
func (r *BaseRule) Labels() qslabels.BaseLabels { return r.labels }
func (r *BaseRule) Annotations() qslabels.BaseLabels { return r.annotations }
func (r *BaseRule) PreferredChannels() []string { return r.preferredChannels }

func (r *BaseRule) GeneratorURL() string {
return prepareRuleGeneratorURL(r.ID(), r.source)
return ruletypes.PrepareRuleGeneratorURL(r.ID(), r.source)
}

func (r *BaseRule) Unit() string {

@@ -261,13 +262,13 @@ func (r *BaseRule) LastError() error {
return r.lastError
}

func (r *BaseRule) SetHealth(health RuleHealth) {
func (r *BaseRule) SetHealth(health ruletypes.RuleHealth) {
r.mtx.Lock()
defer r.mtx.Unlock()
r.health = health
}

func (r *BaseRule) Health() RuleHealth {
func (r *BaseRule) Health() ruletypes.RuleHealth {
r.mtx.Lock()
defer r.mtx.Unlock()
return r.health

@@ -307,8 +308,8 @@ func (r *BaseRule) State() model.AlertState {
return maxState
}

func (r *BaseRule) ActiveAlerts() []*Alert {
var res []*Alert
func (r *BaseRule) ActiveAlerts() []*ruletypes.Alert {
var res []*ruletypes.Alert
for _, a := range r.currentAlerts() {
if a.ResolvedAt.IsZero() {
res = append(res, a)

@@ -332,9 +333,9 @@ func (r *BaseRule) SendAlerts(ctx context.Context, ts time.Time, resendDelay tim
return
}

alerts := []*Alert{}
r.ForEachActiveAlert(func(alert *Alert) {
if alert.needsSending(ts, resendDelay) {
alerts := []*ruletypes.Alert{}
r.ForEachActiveAlert(func(alert *ruletypes.Alert) {
if alert.NeedsSending(ts, resendDelay) {
alert.LastSentAt = ts
delta := resendDelay
if interval > resendDelay {

@@ -348,7 +349,7 @@ func (r *BaseRule) SendAlerts(ctx context.Context, ts time.Time, resendDelay tim
notifyFunc(ctx, orgID, "", alerts...)
}

func (r *BaseRule) ForEachActiveAlert(f func(*Alert)) {
func (r *BaseRule) ForEachActiveAlert(f func(*ruletypes.Alert)) {
r.mtx.Lock()
defer r.mtx.Unlock()

@@ -357,8 +358,8 @@ func (r *BaseRule) ForEachActiveAlert(f func(*Alert)) {
}
}

func (r *BaseRule) ShouldAlert(series v3.Series) (Sample, bool) {
var alertSmpl Sample
func (r *BaseRule) ShouldAlert(series v3.Series) (ruletypes.Sample, bool) {
var alertSmpl ruletypes.Sample
var shouldAlert bool
var lbls qslabels.Labels

@@ -381,54 +382,54 @@ func (r *BaseRule) ShouldAlert(series v3.Series) (Sample, bool) {
}

switch r.matchType() {
case AtleastOnce:
case ruletypes.AtleastOnce:
// If any sample matches the condition, the rule is firing.
if r.compareOp() == ValueIsAbove {
if r.compareOp() == ruletypes.ValueIsAbove {
for _, smpl := range series.Points {
if smpl.Value > r.targetVal() {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
alertSmpl = ruletypes.Sample{Point: ruletypes.Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
break
}
}
} else if r.compareOp() == ValueIsBelow {
} else if r.compareOp() == ruletypes.ValueIsBelow {
for _, smpl := range series.Points {
if smpl.Value < r.targetVal() {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
alertSmpl = ruletypes.Sample{Point: ruletypes.Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
break
}
}
} else if r.compareOp() == ValueIsEq {
} else if r.compareOp() == ruletypes.ValueIsEq {
for _, smpl := range series.Points {
if smpl.Value == r.targetVal() {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
alertSmpl = ruletypes.Sample{Point: ruletypes.Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
break
}
}
} else if r.compareOp() == ValueIsNotEq {
} else if r.compareOp() == ruletypes.ValueIsNotEq {
for _, smpl := range series.Points {
if smpl.Value != r.targetVal() {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
alertSmpl = ruletypes.Sample{Point: ruletypes.Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
break
}
}
} else if r.compareOp() == ValueOutsideBounds {
} else if r.compareOp() == ruletypes.ValueOutsideBounds {
for _, smpl := range series.Points {
if math.Abs(smpl.Value) >= r.targetVal() {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
alertSmpl = ruletypes.Sample{Point: ruletypes.Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
break
}
}
}
case AllTheTimes:
case ruletypes.AllTheTimes:
// If all samples match the condition, the rule is firing.
shouldAlert = true
alertSmpl = Sample{Point: Point{V: r.targetVal()}, Metric: lbls}
if r.compareOp() == ValueIsAbove {
alertSmpl = ruletypes.Sample{Point: ruletypes.Point{V: r.targetVal()}, Metric: lbls}
if r.compareOp() == ruletypes.ValueIsAbove {
for _, smpl := range series.Points {
if smpl.Value <= r.targetVal() {
shouldAlert = false

@@ -443,9 +444,9 @@ func (r *BaseRule) ShouldAlert(series v3.Series) (Sample, bool) {
minValue = smpl.Value
}
}
alertSmpl = Sample{Point: Point{V: minValue}, Metric: lbls}
alertSmpl = ruletypes.Sample{Point: ruletypes.Point{V: minValue}, Metric: lbls}
}
} else if r.compareOp() == ValueIsBelow {
} else if r.compareOp() == ruletypes.ValueIsBelow {
for _, smpl := range series.Points {
if smpl.Value >= r.targetVal() {
shouldAlert = false

@@ -459,16 +460,16 @@ func (r *BaseRule) ShouldAlert(series v3.Series) (Sample, bool) {
maxValue = smpl.Value
}
}
alertSmpl = Sample{Point: Point{V: maxValue}, Metric: lbls}
alertSmpl = ruletypes.Sample{Point: ruletypes.Point{V: maxValue}, Metric: lbls}
}
} else if r.compareOp() == ValueIsEq {
} else if r.compareOp() == ruletypes.ValueIsEq {
for _, smpl := range series.Points {
if smpl.Value != r.targetVal() {
shouldAlert = false
break
}
}
} else if r.compareOp() == ValueIsNotEq {
} else if r.compareOp() == ruletypes.ValueIsNotEq {
for _, smpl := range series.Points {
if smpl.Value == r.targetVal() {
shouldAlert = false

@@ -479,21 +480,21 @@ func (r *BaseRule) ShouldAlert(series v3.Series) (Sample, bool) {
if shouldAlert {
for _, smpl := range series.Points {
if !math.IsInf(smpl.Value, 0) && !math.IsNaN(smpl.Value) {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
alertSmpl = ruletypes.Sample{Point: ruletypes.Point{V: smpl.Value}, Metric: lbls}
break
}
}
}
} else if r.compareOp() == ValueOutsideBounds {
} else if r.compareOp() == ruletypes.ValueOutsideBounds {
for _, smpl := range series.Points {
if math.Abs(smpl.Value) < r.targetVal() {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
alertSmpl = ruletypes.Sample{Point: ruletypes.Point{V: smpl.Value}, Metric: lbls}
shouldAlert = false
break
}
}
}
case OnAverage:
case ruletypes.OnAverage:
// If the average of all samples matches the condition, the rule is firing.
var sum, count float64
for _, smpl := range series.Points {

@@ -504,29 +505,29 @@ func (r *BaseRule) ShouldAlert(series v3.Series) (Sample, bool) {
count++
}
avg := sum / count
alertSmpl = Sample{Point: Point{V: avg}, Metric: lbls}
if r.compareOp() == ValueIsAbove {
alertSmpl = ruletypes.Sample{Point: ruletypes.Point{V: avg}, Metric: lbls}
if r.compareOp() == ruletypes.ValueIsAbove {
if avg > r.targetVal() {
shouldAlert = true
}
} else if r.compareOp() == ValueIsBelow {
} else if r.compareOp() == ruletypes.ValueIsBelow {
if avg < r.targetVal() {
shouldAlert = true
}
} else if r.compareOp() == ValueIsEq {
} else if r.compareOp() == ruletypes.ValueIsEq {
if avg == r.targetVal() {
shouldAlert = true
}
} else if r.compareOp() == ValueIsNotEq {
} else if r.compareOp() == ruletypes.ValueIsNotEq {
if avg != r.targetVal() {
shouldAlert = true
}
} else if r.compareOp() == ValueOutsideBounds {
} else if r.compareOp() == ruletypes.ValueOutsideBounds {
if math.Abs(avg) >= r.targetVal() {
shouldAlert = true
}
}
case InTotal:
case ruletypes.InTotal:
// If the sum of all samples matches the condition, the rule is firing.
var sum float64

@@ -536,45 +537,45 @@ func (r *BaseRule) ShouldAlert(series v3.Series) (Sample, bool) {
}
sum += smpl.Value
}
alertSmpl = Sample{Point: Point{V: sum}, Metric: lbls}
if r.compareOp() == ValueIsAbove {
alertSmpl = ruletypes.Sample{Point: ruletypes.Point{V: sum}, Metric: lbls}
if r.compareOp() == ruletypes.ValueIsAbove {
if sum > r.targetVal() {
shouldAlert = true
}
} else if r.compareOp() == ValueIsBelow {
} else if r.compareOp() == ruletypes.ValueIsBelow {
if sum < r.targetVal() {
shouldAlert = true
}
} else if r.compareOp() == ValueIsEq {
} else if r.compareOp() == ruletypes.ValueIsEq {
if sum == r.targetVal() {
shouldAlert = true
}
} else if r.compareOp() == ValueIsNotEq {
} else if r.compareOp() == ruletypes.ValueIsNotEq {
if sum != r.targetVal() {
shouldAlert = true
}
} else if r.compareOp() == ValueOutsideBounds {
} else if r.compareOp() == ruletypes.ValueOutsideBounds {
if math.Abs(sum) >= r.targetVal() {
shouldAlert = true
}
}
case Last:
case ruletypes.Last:
// If the last sample matches the condition, the rule is firing.
shouldAlert = false
alertSmpl = Sample{Point: Point{V: series.Points[len(series.Points)-1].Value}, Metric: lbls}
if r.compareOp() == ValueIsAbove {
alertSmpl = ruletypes.Sample{Point: ruletypes.Point{V: series.Points[len(series.Points)-1].Value}, Metric: lbls}
if r.compareOp() == ruletypes.ValueIsAbove {
if series.Points[len(series.Points)-1].Value > r.targetVal() {
shouldAlert = true
}
} else if r.compareOp() == ValueIsBelow {
} else if r.compareOp() == ruletypes.ValueIsBelow {
if series.Points[len(series.Points)-1].Value < r.targetVal() {
shouldAlert = true
}
} else if r.compareOp() == ValueIsEq {
} else if r.compareOp() == ruletypes.ValueIsEq {
if series.Points[len(series.Points)-1].Value == r.targetVal() {
shouldAlert = true
}
} else if r.compareOp() == ValueIsNotEq {
} else if r.compareOp() == ruletypes.ValueIsNotEq {
if series.Points[len(series.Points)-1].Value != r.targetVal() {
shouldAlert = true
}
@@ -4,6 +4,7 @@ import (
"testing"

v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
)

func TestBaseRule_RequireMinPoints(t *testing.T) {

@@ -17,7 +18,7 @@ func TestBaseRule_RequireMinPoints(t *testing.T) {
{
name: "test should skip if less than min points",
rule: &BaseRule{
ruleCondition: &RuleCondition{
ruleCondition: &ruletypes.RuleCondition{
RequireMinPoints: true,
RequiredNumPoints: 4,
},

@@ -33,11 +34,11 @@ func TestBaseRule_RequireMinPoints(t *testing.T) {
{
name: "test should alert if more than min points",
rule: &BaseRule{
ruleCondition: &RuleCondition{
ruleCondition: &ruletypes.RuleCondition{
RequireMinPoints: true,
RequiredNumPoints: 4,
CompareOp: ValueIsAbove,
MatchType: AtleastOnce,
CompareOp: ruletypes.ValueIsAbove,
MatchType: ruletypes.AtleastOnce,
Target: &threshold,
},
},
@ -1,387 +0,0 @@
|
||||
package rules
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/uptrace/bun"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
// Data store to capture user alert rule settings
type RuleDB interface {
// CreateRule stores rule in the db and returns tx and group name (on success)
CreateRule(context.Context, *StoredRule, func(context.Context, int64) error) (int64, error)

// EditRuleTx updates the given rule in the db and returns tx and group name (on success)
EditRule(context.Context, *StoredRule, func(context.Context) error) error

// DeleteRuleTx deletes the given rule in the db and returns tx and group name (on success)
DeleteRule(context.Context, string, func(context.Context) error) error

// GetStoredRules fetches the rule definitions from db
GetStoredRules(ctx context.Context) ([]StoredRule, error)

// GetStoredRule for a given ID from DB
GetStoredRule(ctx context.Context, id string) (*StoredRule, error)

// CreatePlannedMaintenance stores a given maintenance in db
CreatePlannedMaintenance(ctx context.Context, maintenance PlannedMaintenance) (int64, error)

// DeletePlannedMaintenance deletes the given maintenance in the db
DeletePlannedMaintenance(ctx context.Context, id string) (string, error)

// GetPlannedMaintenanceByID fetches the maintenance definition from db by id
GetPlannedMaintenanceByID(ctx context.Context, id string) (*PlannedMaintenance, error)

// EditPlannedMaintenance updates the given maintenance in the db
EditPlannedMaintenance(ctx context.Context, maintenance PlannedMaintenance, id string) (string, error)

// GetAllPlannedMaintenance fetches the maintenance definitions from db
GetAllPlannedMaintenance(ctx context.Context) ([]PlannedMaintenance, error)

// used for internal telemetry
GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error)
}
|
||||
|
||||
type StoredRule struct {
bun.BaseModel `bun:"rules"`

Id int `json:"id" db:"id" bun:"id,pk,autoincrement"`
CreatedAt *time.Time `json:"created_at" db:"created_at" bun:"created_at"`
CreatedBy *string `json:"created_by" db:"created_by" bun:"created_by"`
UpdatedAt *time.Time `json:"updated_at" db:"updated_at" bun:"updated_at"`
UpdatedBy *string `json:"updated_by" db:"updated_by" bun:"updated_by"`
Data string `json:"data" db:"data" bun:"data"`
}
|
||||
|
||||
type ruleDB struct {
|
||||
*sqlx.DB
|
||||
sqlstore sqlstore.SQLStore
|
||||
}
|
||||
|
||||
func NewRuleDB(db *sqlx.DB, sqlstore sqlstore.SQLStore) RuleDB {
|
||||
return &ruleDB{db, sqlstore}
|
||||
}
|
||||
|
||||
// CreateRule stores a given rule in db and returns task name and error (if any)
|
||||
func (r *ruleDB) CreateRule(ctx context.Context, storedRule *StoredRule, cb func(context.Context, int64) error) (int64, error) {
|
||||
err := r.sqlstore.RunInTxCtx(ctx, nil, func(ctx context.Context) error {
|
||||
_, err := r.sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewInsert().
|
||||
Model(storedRule).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return cb(ctx, int64(storedRule.Id))
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return int64(storedRule.Id), nil
|
||||
}
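
CreateRule runs the insert and the caller-provided callback inside a single transaction, so a failure while wiring the rule into downstream config also discards the row. A rough sketch of that contract with an in-memory stand-in for the store (not the real sqlstore/bun code):

package main

import (
	"context"
	"errors"
	"fmt"
)

type memStore struct {
	rules map[int64]string
	next  int64
}

// createRule inserts the rule, then invokes cb with the new ID; any error
// from cb discards the insert, mirroring the RunInTxCtx + callback pattern.
func (s *memStore) createRule(ctx context.Context, data string, cb func(context.Context, int64) error) (int64, error) {
	s.next++
	id := s.next
	s.rules[id] = data
	if err := cb(ctx, id); err != nil {
		delete(s.rules, id) // "rollback"
		return 0, err
	}
	return id, nil
}

func main() {
	s := &memStore{rules: map[int64]string{}}
	_, err := s.createRule(context.Background(), `{"alert":"High latency"}`, func(ctx context.Context, id int64) error {
		return errors.New("failed to update notification config")
	})
	fmt.Println(err, len(s.rules)) // insert was discarded
}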
// EditRule stores a given rule string in database and returns task name and error (if any)
|
||||
func (r *ruleDB) EditRule(ctx context.Context, storedRule *StoredRule, cb func(context.Context) error) error {
|
||||
return r.sqlstore.RunInTxCtx(ctx, nil, func(ctx context.Context) error {
|
||||
_, err := r.sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewUpdate().
|
||||
Model(storedRule).
|
||||
WherePK().
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return cb(ctx)
|
||||
})
|
||||
}
|
||||
|
||||
// DeleteRule deletes a given rule with id and returns taskname and error (if any)
|
||||
func (r *ruleDB) DeleteRule(ctx context.Context, id string, cb func(context.Context) error) error {
|
||||
if err := r.sqlstore.RunInTxCtx(ctx, nil, func(ctx context.Context) error {
|
||||
_, err := r.sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewDelete().
|
||||
Model(&StoredRule{}).
|
||||
Where("id = ?", id).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return cb(ctx)
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *ruleDB) GetStoredRules(ctx context.Context) ([]StoredRule, error) {
|
||||
|
||||
rules := []StoredRule{}
|
||||
|
||||
query := "SELECT id, created_at, created_by, updated_at, updated_by, data FROM rules"
|
||||
|
||||
err := r.Select(&rules, query)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return rules, nil
|
||||
}
|
||||
|
||||
func (r *ruleDB) GetStoredRule(ctx context.Context, id string) (*StoredRule, error) {
|
||||
intId, err := strconv.Atoi(id)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid id parameter")
|
||||
}
|
||||
|
||||
rule := &StoredRule{}
|
||||
|
||||
query := fmt.Sprintf("SELECT id, created_at, created_by, updated_at, updated_by, data FROM rules WHERE id=%d", intId)
|
||||
err = r.Get(rule, query)
|
||||
|
||||
// zap.L().Info(query)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return rule, nil
|
||||
}
|
||||
|
||||
func (r *ruleDB) GetAllPlannedMaintenance(ctx context.Context) ([]PlannedMaintenance, error) {
|
||||
maintenances := []PlannedMaintenance{}
|
||||
|
||||
query := "SELECT id, name, description, schedule, alert_ids, created_at, created_by, updated_at, updated_by FROM planned_maintenance"
|
||||
|
||||
err := r.Select(&maintenances, query)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return maintenances, nil
|
||||
}
|
||||
|
||||
func (r *ruleDB) GetPlannedMaintenanceByID(ctx context.Context, id string) (*PlannedMaintenance, error) {
|
||||
maintenance := &PlannedMaintenance{}
|
||||
|
||||
query := "SELECT id, name, description, schedule, alert_ids, created_at, created_by, updated_at, updated_by FROM planned_maintenance WHERE id=$1"
|
||||
err := r.Get(maintenance, query, id)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return maintenance, nil
|
||||
}
|
||||
|
||||
func (r *ruleDB) CreatePlannedMaintenance(ctx context.Context, maintenance PlannedMaintenance) (int64, error) {
|
||||
|
||||
claims, ok := authtypes.ClaimsFromContext(ctx)
|
||||
if !ok {
|
||||
return 0, errors.New("no claims found in context")
|
||||
}
|
||||
maintenance.CreatedBy = claims.Email
|
||||
maintenance.CreatedAt = time.Now()
|
||||
maintenance.UpdatedBy = claims.Email
|
||||
maintenance.UpdatedAt = time.Now()
|
||||
|
||||
query := "INSERT INTO planned_maintenance (name, description, schedule, alert_ids, created_at, created_by, updated_at, updated_by) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)"
|
||||
|
||||
result, err := r.Exec(query, maintenance.Name, maintenance.Description, maintenance.Schedule, maintenance.AlertIds, maintenance.CreatedAt, maintenance.CreatedBy, maintenance.UpdatedAt, maintenance.UpdatedBy)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return result.LastInsertId()
|
||||
}
|
||||
|
||||
func (r *ruleDB) DeletePlannedMaintenance(ctx context.Context, id string) (string, error) {
|
||||
query := "DELETE FROM planned_maintenance WHERE id=$1"
|
||||
_, err := r.Exec(query, id)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return "", err
|
||||
}
|
||||
|
||||
return "", nil
|
||||
}
|
||||
|
||||
func (r *ruleDB) EditPlannedMaintenance(ctx context.Context, maintenance PlannedMaintenance, id string) (string, error) {
|
||||
claims, ok := authtypes.ClaimsFromContext(ctx)
|
||||
if !ok {
|
||||
return "", errors.New("no claims found in context")
|
||||
}
|
||||
maintenance.UpdatedBy = claims.Email
|
||||
maintenance.UpdatedAt = time.Now()
|
||||
|
||||
query := "UPDATE planned_maintenance SET name=$1, description=$2, schedule=$3, alert_ids=$4, updated_at=$5, updated_by=$6 WHERE id=$7"
|
||||
_, err := r.Exec(query, maintenance.Name, maintenance.Description, maintenance.Schedule, maintenance.AlertIds, maintenance.UpdatedAt, maintenance.UpdatedBy, id)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return "", err
|
||||
}
|
||||
|
||||
return "", nil
|
||||
}
|
||||
|
||||
func (r *ruleDB) getChannels() (*[]model.ChannelItem, *model.ApiError) {
|
||||
channels := []model.ChannelItem{}
|
||||
|
||||
query := "SELECT id, created_at, updated_at, name, type, data data FROM notification_channels"
|
||||
|
||||
err := r.Select(&channels, query)
|
||||
|
||||
zap.L().Info(query)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
}
|
||||
|
||||
return &channels, nil
|
||||
}
|
||||
|
||||
func (r *ruleDB) GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error) {
|
||||
alertsInfo := model.AlertsInfo{}
|
||||
// fetch alerts from rules db
|
||||
query := "SELECT data FROM rules"
|
||||
var alertsData []string
|
||||
var alertNames []string
|
||||
err := r.Select(&alertsData, query)
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return &alertsInfo, err
|
||||
}
|
||||
for _, alert := range alertsData {
|
||||
var rule GettableRule
|
||||
if strings.Contains(alert, "time_series_v2") {
|
||||
alertsInfo.AlertsWithTSV2 = alertsInfo.AlertsWithTSV2 + 1
|
||||
}
|
||||
err = json.Unmarshal([]byte(alert), &rule)
|
||||
if err != nil {
|
||||
zap.L().Error("invalid rule data", zap.Error(err))
|
||||
continue
|
||||
}
|
||||
alertNames = append(alertNames, rule.AlertName)
|
||||
if rule.AlertType == AlertTypeLogs {
|
||||
alertsInfo.LogsBasedAlerts = alertsInfo.LogsBasedAlerts + 1
|
||||
|
||||
if rule.RuleCondition != nil && rule.RuleCondition.CompositeQuery != nil {
|
||||
if rule.RuleCondition.CompositeQuery.QueryType == v3.QueryTypeClickHouseSQL {
|
||||
if strings.Contains(alert, "signoz_logs.distributed_logs") ||
|
||||
strings.Contains(alert, "signoz_logs.logs") {
|
||||
alertsInfo.AlertsWithLogsChQuery = alertsInfo.AlertsWithLogsChQuery + 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, query := range rule.RuleCondition.CompositeQuery.BuilderQueries {
|
||||
if rule.RuleCondition.CompositeQuery.QueryType == v3.QueryTypeBuilder {
|
||||
if query.Filters != nil {
|
||||
for _, item := range query.Filters.Items {
|
||||
if slices.Contains([]string{"contains", "ncontains", "like", "nlike"}, string(item.Operator)) {
|
||||
if item.Key.Key != "body" {
|
||||
alertsInfo.AlertsWithLogsContainsOp += 1
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if rule.AlertType == AlertTypeMetric {
|
||||
alertsInfo.MetricBasedAlerts = alertsInfo.MetricBasedAlerts + 1
|
||||
if rule.RuleCondition != nil && rule.RuleCondition.CompositeQuery != nil {
|
||||
if rule.RuleCondition.CompositeQuery.QueryType == v3.QueryTypeBuilder {
|
||||
alertsInfo.MetricsBuilderQueries = alertsInfo.MetricsBuilderQueries + 1
|
||||
} else if rule.RuleCondition.CompositeQuery.QueryType == v3.QueryTypeClickHouseSQL {
|
||||
alertsInfo.MetricsClickHouseQueries = alertsInfo.MetricsClickHouseQueries + 1
|
||||
} else if rule.RuleCondition.CompositeQuery.QueryType == v3.QueryTypePromQL {
|
||||
alertsInfo.MetricsPrometheusQueries = alertsInfo.MetricsPrometheusQueries + 1
|
||||
for _, query := range rule.RuleCondition.CompositeQuery.PromQueries {
|
||||
if strings.Contains(query.Query, "signoz_") {
|
||||
alertsInfo.SpanMetricsPrometheusQueries = alertsInfo.SpanMetricsPrometheusQueries + 1
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if rule.RuleType == RuleTypeAnomaly {
|
||||
alertsInfo.AnomalyBasedAlerts = alertsInfo.AnomalyBasedAlerts + 1
|
||||
}
|
||||
} else if rule.AlertType == AlertTypeTraces {
|
||||
alertsInfo.TracesBasedAlerts = alertsInfo.TracesBasedAlerts + 1
|
||||
|
||||
if rule.RuleCondition != nil && rule.RuleCondition.CompositeQuery != nil {
|
||||
if rule.RuleCondition.CompositeQuery.QueryType == v3.QueryTypeClickHouseSQL {
|
||||
if strings.Contains(alert, "signoz_traces.distributed_signoz_index_v2") ||
|
||||
strings.Contains(alert, "signoz_traces.distributed_signoz_spans") ||
|
||||
strings.Contains(alert, "signoz_traces.distributed_signoz_error_index_v2") {
|
||||
alertsInfo.AlertsWithTraceChQuery = alertsInfo.AlertsWithTraceChQuery + 1
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
alertsInfo.TotalAlerts = alertsInfo.TotalAlerts + 1
|
||||
if !rule.PostableRule.Disabled {
|
||||
alertsInfo.TotalActiveAlerts = alertsInfo.TotalActiveAlerts + 1
|
||||
}
|
||||
}
|
||||
alertsInfo.AlertNames = alertNames
|
||||
|
||||
channels, _ := r.getChannels()
|
||||
if channels != nil {
|
||||
alertsInfo.TotalChannels = len(*channels)
|
||||
for _, channel := range *channels {
|
||||
if channel.Type == "slack" {
|
||||
alertsInfo.SlackChannels = alertsInfo.SlackChannels + 1
|
||||
}
|
||||
if channel.Type == "webhook" {
|
||||
alertsInfo.WebHookChannels = alertsInfo.WebHookChannels + 1
|
||||
}
|
||||
if channel.Type == "email" {
|
||||
alertsInfo.EmailChannels = alertsInfo.EmailChannels + 1
|
||||
}
|
||||
if channel.Type == "pagerduty" {
|
||||
alertsInfo.PagerDutyChannels = alertsInfo.PagerDutyChannels + 1
|
||||
}
|
||||
if channel.Type == "opsgenie" {
|
||||
alertsInfo.OpsGenieChannels = alertsInfo.OpsGenieChannels + 1
|
||||
}
|
||||
if channel.Type == "msteams" {
|
||||
alertsInfo.MSTeamsChannels = alertsInfo.MSTeamsChannels + 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return &alertsInfo, nil
|
||||
}
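
GetAlertsInfo is essentially a tally over the JSON blobs stored in the rules table. A compact sketch of that idea; the struct fields, JSON tags and alert-type values below are assumptions made for illustration, not the exact GettableRule schema:

package main

import (
	"encoding/json"
	"fmt"
)

type storedAlert struct {
	Alert     string `json:"alert"`
	AlertType string `json:"alertType"`
	Disabled  bool   `json:"disabled"`
}

// tally unmarshals each payload and counts alerts by type, skipping rows
// that fail to parse, the same way the original loop does.
func tally(rows []string) (map[string]int, []string) {
	counts := map[string]int{}
	names := []string{}
	for _, raw := range rows {
		var r storedAlert
		if err := json.Unmarshal([]byte(raw), &r); err != nil {
			continue
		}
		names = append(names, r.Alert)
		counts[r.AlertType]++
		counts["total"]++
		if !r.Disabled {
			counts["active"]++
		}
	}
	return counts, names
}

func main() {
	rows := []string{
		`{"alert":"cpu","alertType":"metric"}`,
		`{"alert":"errors","alertType":"logs","disabled":true}`,
	}
	counts, names := tally(rows)
	fmt.Println(counts, names)
}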
@ -23,16 +23,21 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/telemetry"
|
||||
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type PrepareTaskOptions struct {
Rule *PostableRule
Rule *ruletypes.PostableRule
TaskName string
RuleDB RuleDB
RuleStore ruletypes.RuleStore
MaintenanceStore ruletypes.MaintenanceStore
Logger *zap.Logger
Reader interfaces.Reader
Cache cache.Cache

@ -41,11 +46,13 @@ type PrepareTaskOptions struct {
SQLStore sqlstore.SQLStore
UseLogsNewSchema bool
UseTraceNewSchema bool
OrgID string
}
|
||||
|
||||
type PrepareTestRuleOptions struct {
|
||||
Rule *PostableRule
|
||||
RuleDB RuleDB
|
||||
Rule *ruletypes.PostableRule
|
||||
RuleStore ruletypes.RuleStore
|
||||
MaintenanceStore ruletypes.MaintenanceStore
|
||||
Logger *zap.Logger
|
||||
Reader interfaces.Reader
|
||||
Cache cache.Cache
|
||||
@ -109,7 +116,8 @@ type Manager struct {
|
||||
mtx sync.RWMutex
|
||||
block chan struct{}
|
||||
// datastore to store alert definitions
|
||||
ruleDB RuleDB
|
||||
ruleStore ruletypes.RuleStore
|
||||
maintenanceStore ruletypes.MaintenanceStore
|
||||
|
||||
logger *zap.Logger
|
||||
reader interfaces.Reader
|
||||
@ -146,7 +154,7 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
|
||||
var task Task
|
||||
|
||||
ruleId := RuleIdFromTaskName(opts.TaskName)
|
||||
if opts.Rule.RuleType == RuleTypeThreshold {
|
||||
if opts.Rule.RuleType == ruletypes.RuleTypeThreshold {
|
||||
// create a threshold rule
|
||||
tr, err := NewThresholdRule(
|
||||
ruleId,
|
||||
@ -165,9 +173,9 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
|
||||
rules = append(rules, tr)
|
||||
|
||||
// create ch rule task for evaluation
|
||||
task = newTask(TaskTypeCh, opts.TaskName, taskNamesuffix, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)
|
||||
task = newTask(TaskTypeCh, opts.TaskName, taskNamesuffix, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
|
||||
|
||||
} else if opts.Rule.RuleType == RuleTypeProm {
|
||||
} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {
|
||||
|
||||
// create promql rule
|
||||
pr, err := NewPromRule(
|
||||
@ -186,10 +194,10 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
|
||||
rules = append(rules, pr)
|
||||
|
||||
// create promql rule task for evaluation
|
||||
task = newTask(TaskTypeProm, opts.TaskName, taskNamesuffix, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.RuleDB)
|
||||
task = newTask(TaskTypeProm, opts.TaskName, taskNamesuffix, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
|
||||
|
||||
} else {
|
||||
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, RuleTypeProm, RuleTypeThreshold)
|
||||
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
|
||||
}
|
||||
|
||||
return task, nil
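
The prepare step therefore branches purely on the rule type: threshold rules become ClickHouse-query tasks, promql rules become prometheus tasks, and anything else is rejected. A boiled-down sketch of that dispatch; the rule-type string values and task labels are assumed for illustration only:

package main

import (
	"errors"
	"fmt"
)

const (
	ruleTypeThreshold = "threshold_rule" // assumed value, stands in for ruletypes.RuleTypeThreshold
	ruleTypeProm      = "promql_rule"    // assumed value, stands in for ruletypes.RuleTypeProm
)

// prepareTask picks the evaluator kind for a rule, mirroring the branching above.
func prepareTask(ruleType, taskName string) (string, error) {
	switch ruleType {
	case ruleTypeThreshold:
		return "ch-task:" + taskName, nil // threshold rules evaluate via the query-service reader
	case ruleTypeProm:
		return "prom-task:" + taskName, nil // promql rules evaluate against prometheus
	default:
		return "", errors.New("unsupported rule type " + ruleType)
	}
}

func main() {
	t, err := prepareTask(ruleTypeThreshold, "0195-uuid-groupname")
	fmt.Println(t, err)
}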
@ -199,12 +207,15 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {
|
||||
// by calling the Run method.
|
||||
func NewManager(o *ManagerOptions) (*Manager, error) {
o = defaultOptions(o)
db := NewRuleDB(o.DBConn, o.SQLStore)
telemetry.GetInstance().SetAlertsInfoCallback(db.GetAlertsInfo)
ruleStore := sqlrulestore.NewRuleStore(o.DBConn, o.SQLStore)
maintenanceStore := sqlrulestore.NewMaintenanceStore(o.SQLStore)

telemetry.GetInstance().SetAlertsInfoCallback(ruleStore.GetAlertsInfo)
m := &Manager{
tasks: map[string]Task{},
rules: map[string]Rule{},
ruleDB: db,
ruleStore: ruleStore,
maintenanceStore: maintenanceStore,
opts: o,
block: make(chan struct{}),
logger: o.Logger,

@ -219,15 +230,19 @@ func NewManager(o *ManagerOptions) (*Manager, error) {
return m, nil
}
|
||||
|
||||
func (m *Manager) Start() {
|
||||
if err := m.initiate(); err != nil {
|
||||
func (m *Manager) Start(ctx context.Context) {
|
||||
if err := m.initiate(ctx); err != nil {
|
||||
zap.L().Error("failed to initialize alerting rules manager", zap.Error(err))
|
||||
}
|
||||
m.run()
|
||||
m.run(ctx)
|
||||
}
|
||||
|
||||
func (m *Manager) RuleDB() RuleDB {
|
||||
return m.ruleDB
|
||||
func (m *Manager) RuleStore() ruletypes.RuleStore {
|
||||
return m.ruleStore
|
||||
}
|
||||
|
||||
func (m *Manager) MaintenanceStore() ruletypes.MaintenanceStore {
|
||||
return m.maintenanceStore
|
||||
}
|
||||
|
||||
func (m *Manager) Pause(b bool) {
|
||||
@ -238,44 +253,51 @@ func (m *Manager) Pause(b bool) {
|
||||
}
|
||||
}
|
||||
|
||||
func (m *Manager) initiate() error {
|
||||
storedRules, err := m.ruleDB.GetStoredRules(context.Background())
|
||||
func (m *Manager) initiate(ctx context.Context) error {
|
||||
orgIDs, err := m.ruleStore.ListOrgs(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if len(storedRules) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var loadErrors []error
|
||||
|
||||
for _, rec := range storedRules {
|
||||
taskName := fmt.Sprintf("%d-groupname", rec.Id)
|
||||
parsedRule, err := ParsePostableRule([]byte(rec.Data))
|
||||
|
||||
for _, orgID := range orgIDs {
|
||||
storedRules, err := m.ruleStore.GetStoredRules(ctx, orgID)
|
||||
if err != nil {
|
||||
if errors.Is(err, ErrFailedToParseJSON) {
|
||||
zap.L().Info("failed to load rule in json format, trying yaml now:", zap.String("name", taskName))
|
||||
return err
|
||||
}
|
||||
if len(storedRules) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
// see if rule is stored in yaml format
|
||||
parsedRule, err = parsePostableRule([]byte(rec.Data), RuleDataKindYaml)
|
||||
for _, rec := range storedRules {
|
||||
taskName := fmt.Sprintf("%s-groupname", rec.ID.StringValue())
|
||||
parsedRule, err := ruletypes.ParsePostableRule([]byte(rec.Data))
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("failed to parse and initialize yaml rule", zap.String("name", taskName), zap.Error(err))
|
||||
if err != nil {
|
||||
if errors.Is(err, ruletypes.ErrFailedToParseJSON) {
|
||||
zap.L().Info("failed to load rule in json format, trying yaml now:", zap.String("name", taskName))
|
||||
|
||||
// see if rule is stored in yaml format
|
||||
parsedRule, err = ruletypes.ParsePostableRuleWithKind([]byte(rec.Data), ruletypes.RuleDataKindYaml)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("failed to parse and initialize yaml rule", zap.String("name", taskName), zap.Error(err))
|
||||
// just one rule is being parsed so expect just one error
|
||||
loadErrors = append(loadErrors, err)
|
||||
continue
|
||||
}
|
||||
} else {
|
||||
zap.L().Error("failed to parse and initialize rule", zap.String("name", taskName), zap.Error(err))
|
||||
// just one rule is being parsed so expect just one error
|
||||
loadErrors = append(loadErrors, err)
|
||||
continue
|
||||
}
|
||||
} else {
|
||||
zap.L().Error("failed to parse and initialize rule", zap.String("name", taskName), zap.Error(err))
|
||||
// just one rule is being parsed so expect just one error
|
||||
loadErrors = append(loadErrors, err)
|
||||
continue
|
||||
}
|
||||
}
|
||||
if !parsedRule.Disabled {
|
||||
err := m.addTask(parsedRule, taskName)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to load the rule definition", zap.String("name", taskName), zap.Error(err))
|
||||
if !parsedRule.Disabled {
|
||||
err := m.addTask(ctx, orgID, parsedRule, taskName)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to load the rule definition", zap.String("name", taskName), zap.Error(err))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
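
Startup is now org-scoped: orgs are listed first, each org's stored rules are loaded, JSON parsing is tried before the YAML fallback, and the task name is derived from the rule UUID. A trimmed-down sketch of that flow; the parse helpers and the in-memory rows are stand-ins, only the "<uuid>-groupname" naming follows the diff:

package main

import (
	"encoding/json"
	"errors"
	"fmt"

	"gopkg.in/yaml.v2"
)

type postableRule struct {
	Alert    string `json:"alert" yaml:"alert"`
	Disabled bool   `json:"disabled" yaml:"disabled"`
}

var errFailedToParseJSON = errors.New("failed to parse json")

func parseRuleJSON(data []byte) (*postableRule, error) {
	var r postableRule
	if err := json.Unmarshal(data, &r); err != nil {
		return nil, errFailedToParseJSON
	}
	return &r, nil
}

func parseRuleYAML(data []byte) (*postableRule, error) {
	var r postableRule
	if err := yaml.Unmarshal(data, &r); err != nil {
		return nil, err
	}
	return &r, nil
}

// loadOrg walks one org's stored rules and decides which tasks to start.
func loadOrg(orgID string, rows map[string]string) {
	for id, data := range rows {
		taskName := fmt.Sprintf("%s-groupname", id)
		rule, err := parseRuleJSON([]byte(data))
		if errors.Is(err, errFailedToParseJSON) {
			rule, err = parseRuleYAML([]byte(data)) // older rules may be stored as YAML
		}
		if err != nil || rule.Disabled {
			continue
		}
		fmt.Printf("org=%s starting task %s for %q\n", orgID, taskName, rule.Alert)
	}
}

func main() {
	loadOrg("org-1", map[string]string{
		"0195f2f3-0000-7000-8000-000000000000": `{"alert":"High error rate"}`,
	})
}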
@ -288,13 +310,13 @@ func (m *Manager) initiate() error {
|
||||
}
|
||||
|
||||
// Run starts processing of the rule manager.
|
||||
func (m *Manager) run() {
|
||||
func (m *Manager) run(_ context.Context) {
|
||||
// initiate blocked tasks
|
||||
close(m.block)
|
||||
}
|
||||
|
||||
// Stop the rule manager's rule evaluation cycles.
|
||||
func (m *Manager) Stop() {
|
||||
func (m *Manager) Stop(ctx context.Context) {
|
||||
m.mtx.Lock()
|
||||
defer m.mtx.Unlock()
|
||||
|
||||
@ -309,28 +331,41 @@ func (m *Manager) Stop() {
|
||||
|
||||
// EditRuleDefinition writes the rule definition to the
|
||||
// datastore and also updates the rule executor
|
||||
func (m *Manager) EditRule(ctx context.Context, ruleStr string, id string) error {
|
||||
func (m *Manager) EditRule(ctx context.Context, ruleStr string, idStr string) error {
|
||||
claims, ok := authtypes.ClaimsFromContext(ctx)
|
||||
if !ok {
|
||||
return errors.New("claims not found in context")
|
||||
}
|
||||
|
||||
parsedRule, err := ParsePostableRule([]byte(ruleStr))
|
||||
ruleUUID, err := valuer.NewUUID(idStr)
|
||||
if err != nil {
|
||||
id, err := strconv.Atoi(idStr)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
ruleHistory, err := m.ruleStore.GetRuleUUID(ctx, id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ruleUUID = ruleHistory.RuleUUID
|
||||
}
|
||||
|
||||
parsedRule, err := ruletypes.ParsePostableRule([]byte(ruleStr))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
existingRule, err := m.ruleDB.GetStoredRule(ctx, id)
|
||||
existingRule, err := m.ruleStore.GetStoredRule(ctx, ruleUUID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
now := time.Now()
|
||||
existingRule.UpdatedAt = &now
|
||||
existingRule.UpdatedBy = &claims.Email
|
||||
existingRule.UpdatedAt = time.Now()
|
||||
existingRule.UpdatedBy = claims.Email
|
||||
existingRule.Data = ruleStr
|
||||
|
||||
return m.ruleDB.EditRule(ctx, existingRule, func(ctx context.Context) error {
|
||||
return m.ruleStore.EditRule(ctx, existingRule, func(ctx context.Context) error {
|
||||
cfg, err := m.alertmanager.GetConfig(ctx, claims.OrgID)
|
||||
if err != nil {
|
||||
return err
|
||||
@ -350,7 +385,7 @@ func (m *Manager) EditRule(ctx context.Context, ruleStr string, id string) error
|
||||
preferredChannels = parsedRule.PreferredChannels
|
||||
}
|
||||
|
||||
err = cfg.UpdateRuleIDMatcher(id, preferredChannels)
|
||||
err = cfg.UpdateRuleIDMatcher(ruleUUID.StringValue(), preferredChannels)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -361,7 +396,7 @@ func (m *Manager) EditRule(ctx context.Context, ruleStr string, id string) error
|
||||
}
|
||||
|
||||
if !m.opts.DisableRules {
|
||||
err = m.syncRuleStateWithTask(prepareTaskName(existingRule.Id), parsedRule)
|
||||
err = m.syncRuleStateWithTask(ctx, claims.OrgID, prepareTaskName(existingRule.ID.StringValue()), parsedRule)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -371,25 +406,26 @@ func (m *Manager) EditRule(ctx context.Context, ruleStr string, id string) error
|
||||
})
|
||||
}
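
EditRule (and GetRule further down) accept either the new uuid-v7 identifier or a legacy integer ID that is mapped through the rule history lookup. A compile-only sketch of that shim, with the history lookup passed in as a function so the sketch stays independent of the concrete store:

package rules

import (
	"context"
	"strconv"

	"github.com/SigNoz/signoz/pkg/valuer"
)

// resolveRuleUUID accepts either a uuid-v7 string or a legacy integer rule ID.
// lookup stands in for the rule-history query (GetRuleUUID in the diff).
func resolveRuleUUID(ctx context.Context, idStr string, lookup func(context.Context, int) (valuer.UUID, error)) (valuer.UUID, error) {
	id, err := valuer.NewUUID(idStr)
	if err == nil {
		return id, nil
	}
	legacyID, convErr := strconv.Atoi(idStr)
	if convErr != nil {
		return id, convErr // id is the zero UUID on this path
	}
	return lookup(ctx, legacyID)
}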
func (m *Manager) editTask(rule *PostableRule, taskName string) error {
|
||||
func (m *Manager) editTask(_ context.Context, orgID string, rule *ruletypes.PostableRule, taskName string) error {
|
||||
m.mtx.Lock()
|
||||
defer m.mtx.Unlock()
|
||||
|
||||
zap.L().Debug("editing a rule task", zap.String("name", taskName))
|
||||
|
||||
newTask, err := m.prepareTaskFunc(PrepareTaskOptions{
|
||||
Rule: rule,
|
||||
TaskName: taskName,
|
||||
RuleDB: m.ruleDB,
|
||||
Logger: m.logger,
|
||||
Reader: m.reader,
|
||||
Cache: m.cache,
|
||||
ManagerOpts: m.opts,
|
||||
NotifyFunc: m.prepareNotifyFunc(),
|
||||
SQLStore: m.sqlstore,
|
||||
|
||||
Rule: rule,
|
||||
TaskName: taskName,
|
||||
RuleStore: m.ruleStore,
|
||||
MaintenanceStore: m.maintenanceStore,
|
||||
Logger: m.logger,
|
||||
Reader: m.reader,
|
||||
Cache: m.cache,
|
||||
ManagerOpts: m.opts,
|
||||
NotifyFunc: m.prepareNotifyFunc(),
|
||||
SQLStore: m.sqlstore,
|
||||
UseLogsNewSchema: m.opts.UseLogsNewSchema,
|
||||
UseTraceNewSchema: m.opts.UseTraceNewSchema,
|
||||
OrgID: orgID,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
@ -426,11 +462,11 @@ func (m *Manager) editTask(rule *PostableRule, taskName string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *Manager) DeleteRule(ctx context.Context, id string) error {
|
||||
_, err := strconv.Atoi(id)
|
||||
func (m *Manager) DeleteRule(ctx context.Context, idStr string) error {
|
||||
id, err := valuer.NewUUID(idStr)
|
||||
if err != nil {
|
||||
zap.L().Error("delete rule received an rule id in invalid format, must be a number", zap.String("id", id), zap.Error(err))
|
||||
return fmt.Errorf("delete rule received an rule id in invalid format, must be a number")
|
||||
zap.L().Error("delete rule received an rule id in invalid format, must be a valid uuid-v7", zap.String("id", idStr), zap.Error(err))
|
||||
return fmt.Errorf("delete rule received an rule id in invalid format, must be a valid uuid-v7")
|
||||
}
|
||||
|
||||
claims, ok := authtypes.ClaimsFromContext(ctx)
|
||||
@ -438,18 +474,18 @@ func (m *Manager) DeleteRule(ctx context.Context, id string) error {
|
||||
return errors.New("claims not found in context")
|
||||
}
|
||||
|
||||
_, err = m.ruleDB.GetStoredRule(ctx, id)
|
||||
_, err = m.ruleStore.GetStoredRule(ctx, id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return m.ruleDB.DeleteRule(ctx, id, func(ctx context.Context) error {
|
||||
return m.ruleStore.DeleteRule(ctx, id, func(ctx context.Context) error {
|
||||
cfg, err := m.alertmanager.GetConfig(ctx, claims.OrgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = cfg.DeleteRuleIDMatcher(id)
|
||||
err = cfg.DeleteRuleIDMatcher(id.StringValue())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -459,7 +495,7 @@ func (m *Manager) DeleteRule(ctx context.Context, id string) error {
|
||||
return err
|
||||
}
|
||||
|
||||
taskName := prepareTaskName(id)
|
||||
taskName := prepareTaskName(id.StringValue())
|
||||
if !m.opts.DisableRules {
|
||||
m.deleteTask(taskName)
|
||||
}
|
||||
@ -486,27 +522,35 @@ func (m *Manager) deleteTask(taskName string) {
|
||||
|
||||
// CreateRule stores rule def into db and also
|
||||
// starts an executor for the rule
|
||||
func (m *Manager) CreateRule(ctx context.Context, ruleStr string) (*GettableRule, error) {
|
||||
parsedRule, err := ParsePostableRule([]byte(ruleStr))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
func (m *Manager) CreateRule(ctx context.Context, ruleStr string) (*ruletypes.GettableRule, error) {
|
||||
claims, ok := authtypes.ClaimsFromContext(ctx)
|
||||
if !ok {
|
||||
return nil, errors.New("claims not found in context")
|
||||
}
|
||||
|
||||
now := time.Now()
|
||||
storedRule := &StoredRule{
|
||||
CreatedAt: &now,
|
||||
CreatedBy: &claims.Email,
|
||||
UpdatedAt: &now,
|
||||
UpdatedBy: &claims.Email,
|
||||
Data: ruleStr,
|
||||
parsedRule, err := ruletypes.ParsePostableRule([]byte(ruleStr))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
id, err := m.ruleDB.CreateRule(ctx, storedRule, func(ctx context.Context, id int64) error {
|
||||
now := time.Now()
|
||||
storedRule := &ruletypes.Rule{
|
||||
Identifiable: types.Identifiable{
|
||||
ID: valuer.GenerateUUID(),
|
||||
},
|
||||
TimeAuditable: types.TimeAuditable{
|
||||
CreatedAt: now,
|
||||
UpdatedAt: now,
|
||||
},
|
||||
UserAuditable: types.UserAuditable{
|
||||
CreatedBy: claims.Email,
|
||||
UpdatedBy: claims.Email,
|
||||
},
|
||||
Data: ruleStr,
|
||||
OrgID: claims.OrgID,
|
||||
}
|
||||
|
||||
id, err := m.ruleStore.CreateRule(ctx, storedRule, func(ctx context.Context, id valuer.UUID) error {
|
||||
cfg, err := m.alertmanager.GetConfig(ctx, claims.OrgID)
|
||||
if err != nil {
|
||||
return err
|
||||
@ -526,7 +570,7 @@ func (m *Manager) CreateRule(ctx context.Context, ruleStr string) (*GettableRule
|
||||
preferredChannels = parsedRule.PreferredChannels
|
||||
}
|
||||
|
||||
err = cfg.CreateRuleIDMatcher(fmt.Sprintf("%d", id), preferredChannels)
|
||||
err = cfg.CreateRuleIDMatcher(id.StringValue(), preferredChannels)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -536,9 +580,9 @@ func (m *Manager) CreateRule(ctx context.Context, ruleStr string) (*GettableRule
|
||||
return err
|
||||
}
|
||||
|
||||
taskName := prepareTaskName(id)
|
||||
taskName := prepareTaskName(id.StringValue())
|
||||
if !m.opts.DisableRules {
|
||||
if err := m.addTask(parsedRule, taskName); err != nil {
|
||||
if err := m.addTask(ctx, claims.OrgID, parsedRule, taskName); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
@ -549,30 +593,31 @@ func (m *Manager) CreateRule(ctx context.Context, ruleStr string) (*GettableRule
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &GettableRule{
|
||||
Id: fmt.Sprintf("%d", id),
|
||||
return &ruletypes.GettableRule{
|
||||
Id: id.StringValue(),
|
||||
PostableRule: *parsedRule,
|
||||
}, nil
|
||||
}
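
Create, edit and delete keep the alertmanager config's rule-ID matchers in step with the rules table, and the key is now the rule UUID string rather than the printed integer ID. A toy illustration of that bookkeeping; it is not the real alertmanagertypes.Config API:

package main

import "fmt"

type matcherConfig struct {
	routes map[string][]string // rule UUID -> preferred channel names
}

func (c *matcherConfig) createRuleIDMatcher(ruleID string, channels []string) {
	c.routes[ruleID] = channels
}

func (c *matcherConfig) updateRuleIDMatcher(ruleID string, channels []string) {
	c.routes[ruleID] = channels
}

func (c *matcherConfig) deleteRuleIDMatcher(ruleID string) {
	delete(c.routes, ruleID)
}

func main() {
	cfg := &matcherConfig{routes: map[string][]string{}}
	cfg.createRuleIDMatcher("0195f2f3-0000-7000-8000-000000000000", []string{"slack-oncall"})
	cfg.updateRuleIDMatcher("0195f2f3-0000-7000-8000-000000000000", []string{"slack-oncall", "pagerduty"})
	fmt.Println(cfg.routes)
	cfg.deleteRuleIDMatcher("0195f2f3-0000-7000-8000-000000000000")
	fmt.Println(cfg.routes)
}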
func (m *Manager) addTask(rule *PostableRule, taskName string) error {
|
||||
func (m *Manager) addTask(_ context.Context, orgID string, rule *ruletypes.PostableRule, taskName string) error {
|
||||
m.mtx.Lock()
|
||||
defer m.mtx.Unlock()
|
||||
|
||||
zap.L().Debug("adding a new rule task", zap.String("name", taskName))
|
||||
newTask, err := m.prepareTaskFunc(PrepareTaskOptions{
|
||||
Rule: rule,
|
||||
TaskName: taskName,
|
||||
RuleDB: m.ruleDB,
|
||||
Logger: m.logger,
|
||||
Reader: m.reader,
|
||||
Cache: m.cache,
|
||||
ManagerOpts: m.opts,
|
||||
NotifyFunc: m.prepareNotifyFunc(),
|
||||
SQLStore: m.sqlstore,
|
||||
|
||||
Rule: rule,
|
||||
TaskName: taskName,
|
||||
RuleStore: m.ruleStore,
|
||||
MaintenanceStore: m.maintenanceStore,
|
||||
Logger: m.logger,
|
||||
Reader: m.reader,
|
||||
Cache: m.cache,
|
||||
ManagerOpts: m.opts,
|
||||
NotifyFunc: m.prepareNotifyFunc(),
|
||||
SQLStore: m.sqlstore,
|
||||
UseLogsNewSchema: m.opts.UseLogsNewSchema,
|
||||
UseTraceNewSchema: m.opts.UseTraceNewSchema,
|
||||
OrgID: orgID,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
@ -648,17 +693,17 @@ func (m *Manager) Rules() []Rule {
|
||||
}
|
||||
|
||||
// TriggeredAlerts returns the list of the manager's rules.
|
||||
func (m *Manager) TriggeredAlerts() []*NamedAlert {
|
||||
func (m *Manager) TriggeredAlerts() []*ruletypes.NamedAlert {
|
||||
// m.mtx.RLock()
|
||||
// defer m.mtx.RUnlock()
|
||||
|
||||
namedAlerts := []*NamedAlert{}
|
||||
namedAlerts := []*ruletypes.NamedAlert{}
|
||||
|
||||
for _, r := range m.rules {
|
||||
active := r.ActiveAlerts()
|
||||
|
||||
for _, a := range active {
|
||||
awn := &NamedAlert{
|
||||
awn := &ruletypes.NamedAlert{
|
||||
Alert: a,
|
||||
Name: r.Name(),
|
||||
}
|
||||
@ -670,11 +715,11 @@ func (m *Manager) TriggeredAlerts() []*NamedAlert {
|
||||
}
|
||||
|
||||
// NotifyFunc sends notifications about a set of alerts generated by the given expression.
|
||||
type NotifyFunc func(ctx context.Context, orgID string, expr string, alerts ...*Alert)
|
||||
type NotifyFunc func(ctx context.Context, orgID string, expr string, alerts ...*ruletypes.Alert)
|
||||
|
||||
// prepareNotifyFunc implements the NotifyFunc for a Notifier.
|
||||
func (m *Manager) prepareNotifyFunc() NotifyFunc {
|
||||
return func(ctx context.Context, orgID string, expr string, alerts ...*Alert) {
|
||||
return func(ctx context.Context, orgID string, expr string, alerts ...*ruletypes.Alert) {
|
||||
var res []*alertmanagertypes.PostableAlert
|
||||
|
||||
for _, alert := range alerts {
|
||||
@ -707,7 +752,7 @@ func (m *Manager) prepareNotifyFunc() NotifyFunc {
|
||||
}
|
||||
|
||||
func (m *Manager) prepareTestNotifyFunc() NotifyFunc {
|
||||
return func(ctx context.Context, orgID string, expr string, alerts ...*Alert) {
|
||||
return func(ctx context.Context, orgID string, expr string, alerts ...*ruletypes.Alert) {
|
||||
if len(alerts) == 0 {
|
||||
return
|
||||
}
|
||||
@ -758,26 +803,29 @@ func (m *Manager) ListActiveRules() ([]Rule, error) {
|
||||
return ruleList, nil
|
||||
}
|
||||
|
||||
func (m *Manager) ListRuleStates(ctx context.Context) (*GettableRules, error) {
|
||||
|
||||
func (m *Manager) ListRuleStates(ctx context.Context) (*ruletypes.GettableRules, error) {
|
||||
claims, ok := authtypes.ClaimsFromContext(ctx)
|
||||
if !ok {
|
||||
return nil, errors.New("claims not found in context")
|
||||
}
|
||||
// fetch rules from DB
|
||||
storedRules, err := m.ruleDB.GetStoredRules(ctx)
|
||||
storedRules, err := m.ruleStore.GetStoredRules(ctx, claims.OrgID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// initiate response object
|
||||
resp := make([]*GettableRule, 0)
|
||||
resp := make([]*ruletypes.GettableRule, 0)
|
||||
|
||||
for _, s := range storedRules {
|
||||
|
||||
ruleResponse := &GettableRule{}
|
||||
ruleResponse := &ruletypes.GettableRule{}
|
||||
if err := json.Unmarshal([]byte(s.Data), ruleResponse); err != nil { // Parse []byte to go struct pointer
|
||||
zap.L().Error("failed to unmarshal rule from db", zap.Int("id", s.Id), zap.Error(err))
|
||||
zap.L().Error("failed to unmarshal rule from db", zap.String("id", s.ID.StringValue()), zap.Error(err))
|
||||
continue
|
||||
}
|
||||
|
||||
ruleResponse.Id = fmt.Sprintf("%d", s.Id)
|
||||
ruleResponse.Id = s.ID.StringValue()
|
||||
|
||||
// fetch state of rule from memory
|
||||
if rm, ok := m.rules[ruleResponse.Id]; !ok {
|
||||
@ -786,26 +834,40 @@ func (m *Manager) ListRuleStates(ctx context.Context) (*GettableRules, error) {
|
||||
} else {
|
||||
ruleResponse.State = rm.State()
|
||||
}
|
||||
ruleResponse.CreatedAt = s.CreatedAt
|
||||
ruleResponse.CreatedBy = s.CreatedBy
|
||||
ruleResponse.UpdatedAt = s.UpdatedAt
|
||||
ruleResponse.UpdatedBy = s.UpdatedBy
|
||||
ruleResponse.CreatedAt = &s.CreatedAt
|
||||
ruleResponse.CreatedBy = &s.CreatedBy
|
||||
ruleResponse.UpdatedAt = &s.UpdatedAt
|
||||
ruleResponse.UpdatedBy = &s.UpdatedBy
|
||||
resp = append(resp, ruleResponse)
|
||||
}
|
||||
|
||||
return &GettableRules{Rules: resp}, nil
|
||||
return &ruletypes.GettableRules{Rules: resp}, nil
|
||||
}
|
||||
|
||||
func (m *Manager) GetRule(ctx context.Context, id string) (*GettableRule, error) {
|
||||
s, err := m.ruleDB.GetStoredRule(ctx, id)
|
||||
func (m *Manager) GetRule(ctx context.Context, idStr string) (*ruletypes.GettableRule, error) {
|
||||
ruleUUID, err := valuer.NewUUID(idStr)
|
||||
if err != nil {
|
||||
id, err := strconv.Atoi(idStr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ruleHistory, err := m.ruleStore.GetRuleUUID(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ruleUUID = ruleHistory.RuleUUID
|
||||
}
|
||||
|
||||
s, err := m.ruleStore.GetStoredRule(ctx, ruleUUID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
r := &GettableRule{}
|
||||
r := &ruletypes.GettableRule{}
|
||||
if err := json.Unmarshal([]byte(s.Data), r); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
r.Id = fmt.Sprintf("%d", s.Id)
|
||||
r.Id = ruleUUID.StringValue()
|
||||
// fetch state of rule from memory
|
||||
if rm, ok := m.rules[r.Id]; !ok {
|
||||
r.State = model.StateDisabled
|
||||
@ -813,10 +875,10 @@ func (m *Manager) GetRule(ctx context.Context, id string) (*GettableRule, error)
|
||||
} else {
|
||||
r.State = rm.State()
|
||||
}
|
||||
r.CreatedAt = s.CreatedAt
|
||||
r.CreatedBy = s.CreatedBy
|
||||
r.UpdatedAt = s.UpdatedAt
|
||||
r.UpdatedBy = s.UpdatedBy
|
||||
r.CreatedAt = &s.CreatedAt
|
||||
r.CreatedBy = &s.CreatedBy
|
||||
r.UpdatedAt = &s.UpdatedAt
|
||||
r.UpdatedBy = &s.UpdatedBy
|
||||
|
||||
return r, nil
|
||||
}
|
||||
@ -824,7 +886,7 @@ func (m *Manager) GetRule(ctx context.Context, id string) (*GettableRule, error)
|
||||
// syncRuleStateWithTask ensures that the state of a stored rule matches
|
||||
// the task state. For example - if a stored rule is disabled, then
|
||||
// there is no task running against it.
|
||||
func (m *Manager) syncRuleStateWithTask(taskName string, rule *PostableRule) error {
|
||||
func (m *Manager) syncRuleStateWithTask(ctx context.Context, orgID string, taskName string, rule *ruletypes.PostableRule) error {
|
||||
|
||||
if rule.Disabled {
|
||||
// check if rule has any task running
|
||||
@ -836,11 +898,11 @@ func (m *Manager) syncRuleStateWithTask(taskName string, rule *PostableRule) err
|
||||
// check if rule has a task running
|
||||
if _, ok := m.tasks[taskName]; !ok {
|
||||
// rule has no task, start one
|
||||
if err := m.addTask(rule, taskName); err != nil {
|
||||
if err := m.addTask(ctx, orgID, rule, taskName); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
if err := m.editTask(rule, taskName); err != nil {
|
||||
if err := m.editTask(ctx, orgID, rule, taskName); err != nil {
|
||||
return err
|
||||
}
|
||||
}
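
The sync rule reads as a small decision table: a disabled rule must have no running task, while an enabled rule gets a task added if missing or re-prepared if present. A distilled sketch with simplified stand-ins for the manager internals:

package main

import "fmt"

type manager struct {
	tasks map[string]bool // taskName -> running
}

// syncRuleState mirrors the branching in syncRuleStateWithTask.
func (m *manager) syncRuleState(taskName string, disabled bool) string {
	_, running := m.tasks[taskName]
	switch {
	case disabled && running:
		delete(m.tasks, taskName)
		return "deleted task"
	case disabled:
		return "nothing to do"
	case !running:
		m.tasks[taskName] = true
		return "added task"
	default:
		return "edited task" // re-prepare the task with the new rule definition
	}
}

func main() {
	m := &manager{tasks: map[string]bool{}}
	fmt.Println(m.syncRuleState("rule-1-groupname", false)) // added task
	fmt.Println(m.syncRuleState("rule-1-groupname", false)) // edited task
	fmt.Println(m.syncRuleState("rule-1-groupname", true))  // deleted task
}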
@ -855,40 +917,41 @@ func (m *Manager) syncRuleStateWithTask(taskName string, rule *PostableRule) err
|
||||
// - over write the patch attributes received in input (ruleStr)
|
||||
// - re-deploy or undeploy task as necessary
|
||||
// - update the patched rule in the DB
|
||||
func (m *Manager) PatchRule(ctx context.Context, ruleStr string, ruleId string) (*GettableRule, error) {
|
||||
func (m *Manager) PatchRule(ctx context.Context, ruleStr string, ruleIdStr string) (*ruletypes.GettableRule, error) {
|
||||
claims, ok := authtypes.ClaimsFromContext(ctx)
|
||||
if !ok {
|
||||
return nil, errors.New("claims not found in context")
|
||||
}
|
||||
|
||||
if ruleId == "" {
|
||||
return nil, fmt.Errorf("id is mandatory for patching rule")
|
||||
ruleID, err := valuer.NewUUID(ruleIdStr)
|
||||
if err != nil {
|
||||
return nil, errors.New(err.Error())
|
||||
}
|
||||
|
||||
taskName := prepareTaskName(ruleId)
|
||||
taskName := prepareTaskName(ruleID.StringValue())
|
||||
|
||||
// retrieve rule from DB
|
||||
storedJSON, err := m.ruleDB.GetStoredRule(ctx, ruleId)
|
||||
storedJSON, err := m.ruleStore.GetStoredRule(ctx, ruleID)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to get stored rule with given id", zap.String("id", ruleId), zap.Error(err))
|
||||
zap.L().Error("failed to get stored rule with given id", zap.String("id", ruleID.StringValue()), zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// storedRule holds the current stored rule from DB
|
||||
storedRule := PostableRule{}
|
||||
storedRule := ruletypes.PostableRule{}
|
||||
if err := json.Unmarshal([]byte(storedJSON.Data), &storedRule); err != nil {
|
||||
zap.L().Error("failed to unmarshal stored rule with given id", zap.String("id", ruleId), zap.Error(err))
|
||||
zap.L().Error("failed to unmarshal stored rule with given id", zap.String("id", ruleID.StringValue()), zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// patchedRule is combo of stored rule and patch received in the request
|
||||
patchedRule, err := parseIntoRule(storedRule, []byte(ruleStr), "json")
|
||||
patchedRule, err := ruletypes.ParseIntoRule(storedRule, []byte(ruleStr), "json")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// deploy or un-deploy task according to patched (new) rule state
|
||||
if err := m.syncRuleStateWithTask(taskName, patchedRule); err != nil {
|
||||
if err := m.syncRuleStateWithTask(ctx, claims.OrgID, taskName, patchedRule); err != nil {
|
||||
zap.L().Error("failed to sync stored rule state with the task", zap.String("taskName", taskName), zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
@ -901,25 +964,25 @@ func (m *Manager) PatchRule(ctx context.Context, ruleStr string, ruleId string)
|
||||
|
||||
now := time.Now()
|
||||
storedJSON.Data = string(patchedRuleBytes)
|
||||
storedJSON.UpdatedBy = &claims.Email
|
||||
storedJSON.UpdatedAt = &now
|
||||
storedJSON.UpdatedBy = claims.Email
|
||||
storedJSON.UpdatedAt = now
|
||||
|
||||
err = m.ruleDB.EditRule(ctx, storedJSON, func(ctx context.Context) error { return nil })
|
||||
err = m.ruleStore.EditRule(ctx, storedJSON, func(ctx context.Context) error { return nil })
|
||||
if err != nil {
|
||||
if err := m.syncRuleStateWithTask(taskName, &storedRule); err != nil {
|
||||
if err := m.syncRuleStateWithTask(ctx, claims.OrgID, taskName, &storedRule); err != nil {
|
||||
zap.L().Error("failed to restore rule after patch failure", zap.String("taskName", taskName), zap.Error(err))
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// prepare http response
|
||||
response := GettableRule{
|
||||
Id: ruleId,
|
||||
response := ruletypes.GettableRule{
|
||||
Id: ruleID.StringValue(),
|
||||
PostableRule: *patchedRule,
|
||||
}
|
||||
|
||||
// fetch state of rule from memory
|
||||
if rm, ok := m.rules[ruleId]; !ok {
|
||||
if rm, ok := m.rules[ruleID.StringValue()]; !ok {
|
||||
response.State = model.StateDisabled
|
||||
response.Disabled = true
|
||||
} else {
|
||||
@ -933,7 +996,7 @@ func (m *Manager) PatchRule(ctx context.Context, ruleStr string, ruleId string)
|
||||
// sends a test notification. returns alert count and error (if any)
|
||||
func (m *Manager) TestNotification(ctx context.Context, ruleStr string) (int, *model.ApiError) {
|
||||
|
||||
parsedRule, err := ParsePostableRule([]byte(ruleStr))
|
||||
parsedRule, err := ruletypes.ParsePostableRule([]byte(ruleStr))
|
||||
|
||||
if err != nil {
|
||||
return 0, model.BadRequest(err)
|
||||
@ -941,7 +1004,8 @@ func (m *Manager) TestNotification(ctx context.Context, ruleStr string) (int, *m
|
||||
|
||||
alertCount, apiErr := m.prepareTestRuleFunc(PrepareTestRuleOptions{
|
||||
Rule: parsedRule,
|
||||
RuleDB: m.ruleDB,
|
||||
RuleStore: m.ruleStore,
|
||||
MaintenanceStore: m.maintenanceStore,
|
||||
Logger: m.logger,
|
||||
Reader: m.reader,
|
||||
Cache: m.cache,
|
||||
@ -955,33 +1019,37 @@ func (m *Manager) TestNotification(ctx context.Context, ruleStr string) (int, *m
|
||||
return alertCount, apiErr
|
||||
}
|
||||
|
||||
func (m *Manager) GetAlertDetailsForMetricNames(ctx context.Context, metricNames []string) (map[string][]GettableRule, *model.ApiError) {
|
||||
result := make(map[string][]GettableRule)
|
||||
func (m *Manager) GetAlertDetailsForMetricNames(ctx context.Context, metricNames []string) (map[string][]ruletypes.GettableRule, *model.ApiError) {
|
||||
claims, ok := authtypes.ClaimsFromContext(ctx)
|
||||
if !ok {
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: errors.New("claims not found in context")}
|
||||
}
|
||||
|
||||
rules, err := m.ruleDB.GetStoredRules(ctx)
|
||||
result := make(map[string][]ruletypes.GettableRule)
|
||||
rules, err := m.ruleStore.GetStoredRules(ctx, claims.OrgID)
|
||||
if err != nil {
|
||||
zap.L().Error("Error getting stored rules", zap.Error(err))
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
|
||||
}
|
||||
|
||||
metricRulesMap := make(map[string][]GettableRule)
|
||||
metricRulesMap := make(map[string][]ruletypes.GettableRule)
|
||||
|
||||
for _, storedRule := range rules {
|
||||
var rule GettableRule
|
||||
var rule ruletypes.GettableRule
|
||||
if err := json.Unmarshal([]byte(storedRule.Data), &rule); err != nil {
|
||||
zap.L().Error("Invalid rule data", zap.Error(err))
|
||||
continue
|
||||
}
|
||||
|
||||
if rule.AlertType != AlertTypeMetric || rule.RuleCondition == nil || rule.RuleCondition.CompositeQuery == nil {
|
||||
if rule.AlertType != ruletypes.AlertTypeMetric || rule.RuleCondition == nil || rule.RuleCondition.CompositeQuery == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
rule.Id = fmt.Sprintf("%d", storedRule.Id)
|
||||
rule.CreatedAt = storedRule.CreatedAt
|
||||
rule.CreatedBy = storedRule.CreatedBy
|
||||
rule.UpdatedAt = storedRule.UpdatedAt
|
||||
rule.UpdatedBy = storedRule.UpdatedBy
|
||||
rule.Id = storedRule.ID.StringValue()
|
||||
rule.CreatedAt = &storedRule.CreatedAt
|
||||
rule.CreatedBy = &storedRule.CreatedBy
|
||||
rule.UpdatedAt = &storedRule.UpdatedAt
|
||||
rule.UpdatedBy = &storedRule.UpdatedBy
|
||||
|
||||
for _, query := range rule.RuleCondition.CompositeQuery.BuilderQueries {
|
||||
if query.AggregateAttribute.Key != "" {
|
||||
@ -1013,7 +1081,7 @@ func (m *Manager) GetAlertDetailsForMetricNames(ctx context.Context, metricNames
|
||||
for _, metricName := range metricNames {
|
||||
if rules, exists := metricRulesMap[metricName]; exists {
|
||||
seen := make(map[string]bool)
|
||||
uniqueRules := make([]GettableRule, 0)
|
||||
uniqueRules := make([]ruletypes.GettableRule, 0)
|
||||
|
||||
for _, rule := range rules {
|
||||
if !seen[rule.Id] {
|
||||
|
@ -16,6 +16,7 @@ import (
|
||||
qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/prometheus/prometheus/promql"
|
||||
yaml "gopkg.in/yaml.v2"
|
||||
)
|
||||
@ -27,7 +28,7 @@ type PromRule struct {
|
||||
|
||||
func NewPromRule(
|
||||
id string,
|
||||
postableRule *PostableRule,
|
||||
postableRule *ruletypes.PostableRule,
|
||||
logger *zap.Logger,
|
||||
reader interfaces.Reader,
|
||||
prometheus prometheus.Prometheus,
|
||||
@ -55,8 +56,8 @@ func NewPromRule(
|
||||
return &p, nil
|
||||
}
|
||||
|
||||
func (r *PromRule) Type() RuleType {
|
||||
return RuleTypeProm
|
||||
func (r *PromRule) Type() ruletypes.RuleType {
|
||||
return ruletypes.RuleTypeProm
|
||||
}
|
||||
|
||||
func (r *PromRule) GetSelectedQuery() string {
|
||||
@ -110,7 +111,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (interface{}, error)
|
||||
zap.L().Info("evaluating promql query", zap.String("name", r.Name()), zap.String("query", q))
|
||||
res, err := r.RunAlertQuery(ctx, q, start, end, interval)
|
||||
if err != nil {
|
||||
r.SetHealth(HealthBad)
|
||||
r.SetHealth(ruletypes.HealthBad)
|
||||
r.SetLastError(err)
|
||||
return nil, err
|
||||
}
|
||||
@ -120,7 +121,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (interface{}, error)
|
||||
|
||||
resultFPs := map[uint64]struct{}{}
|
||||
|
||||
var alerts = make(map[uint64]*Alert, len(res))
|
||||
var alerts = make(map[uint64]*ruletypes.Alert, len(res))
|
||||
|
||||
for _, series := range res {
|
||||
l := make(map[string]string, len(series.Metric))
|
||||
@ -140,14 +141,14 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (interface{}, error)
|
||||
|
||||
threshold := valueFormatter.Format(r.targetVal(), r.Unit())
|
||||
|
||||
tmplData := AlertTemplateData(l, valueFormatter.Format(alertSmpl.V, r.Unit()), threshold)
|
||||
tmplData := ruletypes.AlertTemplateData(l, valueFormatter.Format(alertSmpl.V, r.Unit()), threshold)
|
||||
// Inject some convenience variables that are easier to remember for users
|
||||
// who are not used to Go's templating system.
|
||||
defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}"
|
||||
|
||||
expand := func(text string) string {
|
||||
|
||||
tmpl := NewTemplateExpander(
|
||||
tmpl := ruletypes.NewTemplateExpander(
|
||||
ctx,
|
||||
defs+text,
|
||||
"__alert_"+r.Name(),
|
||||
@ -187,12 +188,12 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (interface{}, error)
|
||||
err = fmt.Errorf("vector contains metrics with the same labelset after applying alert labels")
|
||||
// We have already acquired the lock above hence using SetHealth and
|
||||
// SetLastError will deadlock.
|
||||
r.health = HealthBad
|
||||
r.health = ruletypes.HealthBad
|
||||
r.lastError = err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
alerts[h] = &Alert{
|
||||
alerts[h] = &ruletypes.Alert{
|
||||
Labels: lbs,
|
||||
QueryResultLables: resultLabels,
|
||||
Annotations: annotations,
|
||||
@ -231,7 +232,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (interface{}, error)
|
||||
if _, ok := resultFPs[fp]; !ok {
|
||||
// If the alert was previously firing, keep it around for a given
|
||||
// retention time so it is reported as resolved to the AlertManager.
|
||||
if a.State == model.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > ResolvedRetention) {
|
||||
if a.State == model.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > ruletypes.ResolvedRetention) {
|
||||
delete(r.Active, fp)
|
||||
}
|
||||
if a.State != model.StateInactive {
|
||||
@ -270,7 +271,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (interface{}, error)
|
||||
}
|
||||
|
||||
}
|
||||
r.health = HealthGood
|
||||
r.health = ruletypes.HealthGood
|
||||
r.lastError = err
|
||||
|
||||
currentState := r.State()
|
||||
@ -289,10 +290,10 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (interface{}, error)
|
||||
|
||||
func (r *PromRule) String() string {
|
||||
|
||||
ar := PostableRule{
|
||||
ar := ruletypes.PostableRule{
|
||||
AlertName: r.name,
|
||||
RuleCondition: r.ruleCondition,
|
||||
EvalWindow: Duration(r.evalWindow),
|
||||
EvalWindow: ruletypes.Duration(r.evalWindow),
|
||||
Labels: r.labels.Map(),
|
||||
Annotations: r.annotations.Map(),
|
||||
PreferredChannels: r.preferredChannels,
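
The Eval hunk above prefixes user templates with {{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}} so annotations can use the shorter $value style. A stdlib-only sketch of that expansion; the real code goes through ruletypes.NewTemplateExpander:

package main

import (
	"bytes"
	"fmt"
	"text/template"
)

type alertData struct {
	Labels    map[string]string
	Value     string
	Threshold string
}

// expand injects the convenience variables, then renders the user template.
func expand(text string, data alertData) (string, error) {
	defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}"
	tmpl, err := template.New("alert").Parse(defs + text)
	if err != nil {
		return "", err
	}
	var buf bytes.Buffer
	if err := tmpl.Execute(&buf, data); err != nil {
		return "", err
	}
	return buf.String(), nil
}

func main() {
	out, err := expand(
		`{{$labels.service}} latency is {{$value}} (threshold {{$threshold}})`,
		alertData{Labels: map[string]string{"service": "checkout"}, Value: "1.2s", Threshold: "800ms"},
	)
	fmt.Println(out, err)
}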
@ -8,6 +8,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/common"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
opentracing "github.com/opentracing/opentracing-go"
|
||||
plabels "github.com/prometheus/prometheus/model/labels"
|
||||
"go.uber.org/zap"
|
||||
@ -35,12 +36,13 @@ type PromRuleTask struct {
|
||||
logger *zap.Logger
|
||||
notify NotifyFunc
|
||||
|
||||
ruleDB RuleDB
|
||||
maintenanceStore ruletypes.MaintenanceStore
|
||||
orgID string
|
||||
}
|
||||
|
||||
// newPromRuleTask holds rules that have promql condition
|
||||
// and evaluates the rule at a given frequency
|
||||
func NewPromRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, ruleDB RuleDB) *PromRuleTask {
|
||||
func NewPromRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID string) *PromRuleTask {
|
||||
zap.L().Info("Initiating a new rule group", zap.String("name", name), zap.Duration("frequency", frequency))
|
||||
|
||||
if time.Now() == time.Now().Add(frequency) {
|
||||
@ -58,8 +60,9 @@ func NewPromRuleTask(name, file string, frequency time.Duration, rules []Rule, o
|
||||
done: make(chan struct{}),
|
||||
terminated: make(chan struct{}),
|
||||
notify: notify,
|
||||
ruleDB: ruleDB,
|
||||
maintenanceStore: maintenanceStore,
|
||||
logger: opts.Logger,
|
||||
orgID: orgID,
|
||||
}
|
||||
}
|
||||
|
||||
@ -323,9 +326,7 @@ func (g *PromRuleTask) Eval(ctx context.Context, ts time.Time) {
|
||||
}()
|
||||
|
||||
zap.L().Info("promql rule task", zap.String("name", g.name), zap.Time("eval started at", ts))
|
||||
|
||||
maintenance, err := g.ruleDB.GetAllPlannedMaintenance(ctx)
|
||||
|
||||
maintenance, err := g.maintenanceStore.GetAllPlannedMaintenance(ctx, g.orgID)
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
}
|
||||
@ -338,7 +339,7 @@ func (g *PromRuleTask) Eval(ctx context.Context, ts time.Time) {
|
||||
shouldSkip := false
|
||||
for _, m := range maintenance {
|
||||
zap.L().Info("checking if rule should be skipped", zap.String("rule", rule.ID()), zap.Any("maintenance", m))
|
||||
if m.shouldSkip(rule.ID(), ts) {
|
||||
if m.ShouldSkip(rule.ID(), ts) {
|
||||
shouldSkip = true
|
||||
break
|
||||
}
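
A rule evaluation is skipped when the timestamp falls inside an active planned-maintenance window that covers the rule. The window and alert-ID fields below are assumptions made for this sketch; the real check lives in the maintenance type's ShouldSkip:

package main

import (
	"fmt"
	"time"
)

type maintenanceWindow struct {
	start, end time.Time
	alertIDs   []string // empty is assumed to mean the window applies to all rules
}

// shouldSkip reports whether evaluation of ruleID at ts falls inside the window.
func (m maintenanceWindow) shouldSkip(ruleID string, ts time.Time) bool {
	if ts.Before(m.start) || ts.After(m.end) {
		return false
	}
	if len(m.alertIDs) == 0 {
		return true
	}
	for _, id := range m.alertIDs {
		if id == ruleID {
			return true
		}
	}
	return false
}

func main() {
	w := maintenanceWindow{
		start:    time.Now().Add(-time.Hour),
		end:      time.Now().Add(time.Hour),
		alertIDs: []string{"rule-uuid-1"},
	}
	fmt.Println(w.shouldSkip("rule-uuid-1", time.Now())) // true
	fmt.Println(w.shouldSkip("rule-uuid-2", time.Now())) // false
}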
@ -376,7 +377,7 @@ func (g *PromRuleTask) Eval(ctx context.Context, ts time.Time) {
|
||||
|
||||
_, err := rule.Eval(ctx, ts)
|
||||
if err != nil {
|
||||
rule.SetHealth(HealthBad)
|
||||
rule.SetHealth(ruletypes.HealthBad)
|
||||
rule.SetLastError(err)
|
||||
|
||||
zap.L().Warn("Evaluating rule failed", zap.String("ruleid", rule.ID()), zap.Error(err))
|
||||
|
@ -5,19 +5,20 @@ import (
|
||||
"time"
|
||||
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
pql "github.com/prometheus/prometheus/promql"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
func TestPromRuleShouldAlert(t *testing.T) {
|
||||
postableRule := PostableRule{
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Test Rule",
|
||||
AlertType: AlertTypeMetric,
|
||||
RuleType: RuleTypeProm,
|
||||
EvalWindow: Duration(5 * time.Minute),
|
||||
Frequency: Duration(1 * time.Minute),
|
||||
RuleCondition: &RuleCondition{
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: ruletypes.RuleTypeProm,
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypePromQL,
|
||||
PromQueries: map[string]*v3.PromQuery{
|
||||
@ -652,8 +653,8 @@ func TestPromRuleShouldAlert(t *testing.T) {
|
||||
}
|
||||
|
||||
for idx, c := range cases {
|
||||
postableRule.RuleCondition.CompareOp = CompareOp(c.compareOp)
|
||||
postableRule.RuleCondition.MatchType = MatchType(c.matchType)
|
||||
postableRule.RuleCondition.CompareOp = ruletypes.CompareOp(c.compareOp)
|
||||
postableRule.RuleCondition.MatchType = ruletypes.MatchType(c.matchType)
|
||||
postableRule.RuleCondition.Target = &c.target
|
||||
|
||||
rule, err := NewPromRule("69", &postableRule, zap.NewNop(), nil, nil)
|
||||
|
@ -6,6 +6,7 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
)
|
||||
|
||||
// A Rule encapsulates a vector expression which is evaluated at a specified
|
||||
@ -13,16 +14,16 @@ import (
|
||||
type Rule interface {
|
||||
ID() string
|
||||
Name() string
|
||||
Type() RuleType
|
||||
Type() ruletypes.RuleType
|
||||
|
||||
Labels() labels.BaseLabels
|
||||
Annotations() labels.BaseLabels
|
||||
Condition() *RuleCondition
|
||||
Condition() *ruletypes.RuleCondition
|
||||
EvalDelay() time.Duration
|
||||
EvalWindow() time.Duration
|
||||
HoldDuration() time.Duration
|
||||
State() model.AlertState
|
||||
ActiveAlerts() []*Alert
|
||||
ActiveAlerts() []*ruletypes.Alert
|
||||
|
||||
PreferredChannels() []string
|
||||
|
||||
@ -30,8 +31,8 @@ type Rule interface {
|
||||
String() string
|
||||
SetLastError(error)
|
||||
LastError() error
|
||||
SetHealth(RuleHealth)
|
||||
Health() RuleHealth
|
||||
SetHealth(ruletypes.RuleHealth)
|
||||
Health() ruletypes.RuleHealth
|
||||
SetEvaluationDuration(time.Duration)
|
||||
GetEvaluationDuration() time.Duration
|
||||
SetEvaluationTimestamp(time.Time)
|
||||
|
@ -9,6 +9,7 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/common"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
opentracing "github.com/opentracing/opentracing-go"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
@ -32,13 +33,14 @@ type RuleTask struct {
|
||||
pause bool
|
||||
notify NotifyFunc
|
||||
|
||||
ruleDB RuleDB
|
||||
maintenanceStore ruletypes.MaintenanceStore
|
||||
orgID string
|
||||
}
|
||||
|
||||
const DefaultFrequency = 1 * time.Minute
|
||||
|
||||
// NewRuleTask makes a new RuleTask with the given name, options, and rules.
|
||||
func NewRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, ruleDB RuleDB) *RuleTask {
|
||||
func NewRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID string) *RuleTask {
|
||||
|
||||
if time.Now() == time.Now().Add(frequency) {
|
||||
frequency = DefaultFrequency
|
||||
@ -46,16 +48,17 @@ func NewRuleTask(name, file string, frequency time.Duration, rules []Rule, opts
|
||||
zap.L().Info("initiating a new rule task", zap.String("name", name), zap.Duration("frequency", frequency))
|
||||
|
||||
return &RuleTask{
|
||||
name: name,
|
||||
file: file,
|
||||
pause: false,
|
||||
frequency: frequency,
|
||||
rules: rules,
|
||||
opts: opts,
|
||||
done: make(chan struct{}),
|
||||
terminated: make(chan struct{}),
|
||||
notify: notify,
|
||||
ruleDB: ruleDB,
|
||||
name: name,
|
||||
file: file,
|
||||
pause: false,
|
||||
frequency: frequency,
|
||||
rules: rules,
|
||||
opts: opts,
|
||||
done: make(chan struct{}),
|
||||
terminated: make(chan struct{}),
|
||||
notify: notify,
|
||||
maintenanceStore: maintenanceStore,
|
||||
orgID: orgID,
|
||||
}
|
||||
}
|
||||
|
||||
@ -305,7 +308,7 @@ func (g *RuleTask) Eval(ctx context.Context, ts time.Time) {
|
||||
|
||||
zap.L().Debug("rule task eval started", zap.String("name", g.name), zap.Time("start time", ts))
|
||||
|
||||
maintenance, err := g.ruleDB.GetAllPlannedMaintenance(ctx)
|
||||
maintenance, err := g.maintenanceStore.GetAllPlannedMaintenance(ctx, g.orgID)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
@ -319,7 +322,7 @@ func (g *RuleTask) Eval(ctx context.Context, ts time.Time) {
|
||||
shouldSkip := false
|
||||
for _, m := range maintenance {
|
||||
zap.L().Info("checking if rule should be skipped", zap.String("rule", rule.ID()), zap.Any("maintenance", m))
|
||||
if m.shouldSkip(rule.ID(), ts) {
|
||||
if m.ShouldSkip(rule.ID(), ts) {
|
||||
shouldSkip = true
|
||||
break
|
||||
}
|
||||
@ -357,7 +360,7 @@ func (g *RuleTask) Eval(ctx context.Context, ts time.Time) {
|
||||
|
||||
_, err := rule.Eval(ctx, ts)
|
||||
if err != nil {
|
||||
rule.SetHealth(HealthBad)
|
||||
rule.SetHealth(ruletypes.HealthBad)
|
||||
rule.SetLastError(err)
|
||||
|
||||
zap.L().Warn("Evaluating rule failed", zap.String("ruleid", rule.ID()), zap.Error(err))
|
||||
|
@@ -3,6 +3,8 @@ package rules
import (
"context"
"time"

ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
)

type TaskType string
@@ -29,9 +31,9 @@ type Task interface {

// newTask returns an appropriate group for
// rule type
func newTask(taskType TaskType, name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, ruleDB RuleDB) Task {
func newTask(taskType TaskType, name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID string) Task {
if taskType == TaskTypeCh {
return NewRuleTask(name, file, frequency, rules, opts, notify, ruleDB)
return NewRuleTask(name, file, frequency, rules, opts, notify, maintenanceStore, orgID)
}
return NewPromRuleTask(name, file, frequency, rules, opts, notify, ruleDB)
return NewPromRuleTask(name, file, frequency, rules, opts, notify, maintenanceStore, orgID)
}

@ -7,6 +7,7 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/google/uuid"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
@ -30,12 +31,12 @@ func defaultTestNotification(opts PrepareTestRuleOptions) (int, *model.ApiError)
|
||||
}
|
||||
|
||||
// append name to indicate this is test alert
|
||||
parsedRule.AlertName = fmt.Sprintf("%s%s", alertname, TestAlertPostFix)
|
||||
parsedRule.AlertName = fmt.Sprintf("%s%s", alertname, ruletypes.TestAlertPostFix)
|
||||
|
||||
var rule Rule
|
||||
var err error
|
||||
|
||||
if parsedRule.RuleType == RuleTypeThreshold {
|
||||
if parsedRule.RuleType == ruletypes.RuleTypeThreshold {
|
||||
|
||||
// add special labels for test alerts
|
||||
parsedRule.Annotations[labels.AlertSummaryLabel] = fmt.Sprintf("The rule threshold is set to %.4f, and the observed metric value is {{$value}}.", *parsedRule.RuleCondition.Target)
|
||||
@ -59,7 +60,7 @@ func defaultTestNotification(opts PrepareTestRuleOptions) (int, *model.ApiError)
|
||||
return 0, model.BadRequest(err)
|
||||
}
|
||||
|
||||
} else if parsedRule.RuleType == RuleTypeProm {
|
||||
} else if parsedRule.RuleType == ruletypes.RuleTypeProm {
|
||||
|
||||
// create promql rule
|
||||
rule, err = NewPromRule(
|
||||
|
@ -15,6 +15,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/contextlinks"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/postprocess"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/querier"
|
||||
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
|
||||
@ -57,7 +58,7 @@ type ThresholdRule struct {
|
||||
|
||||
func NewThresholdRule(
|
||||
id string,
|
||||
p *PostableRule,
|
||||
p *ruletypes.PostableRule,
|
||||
reader interfaces.Reader,
|
||||
useLogsNewSchema bool,
|
||||
useTraceNewSchema bool,
|
||||
@ -99,8 +100,8 @@ func NewThresholdRule(
|
||||
return &t, nil
|
||||
}
|
||||
|
||||
func (r *ThresholdRule) Type() RuleType {
|
||||
return RuleTypeThreshold
|
||||
func (r *ThresholdRule) Type() ruletypes.RuleType {
|
||||
return ruletypes.RuleTypeThreshold
|
||||
}
|
||||
|
||||
func (r *ThresholdRule) prepareQueryRange(ts time.Time) (*v3.QueryRangeParamsV3, error) {
|
||||
@ -258,7 +259,7 @@ func (r *ThresholdRule) GetSelectedQuery() string {
|
||||
return r.ruleCondition.GetSelectedQueryName()
|
||||
}
|
||||
|
||||
func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, ts time.Time) (Vector, error) {
|
||||
func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletypes.Vector, error) {
|
||||
|
||||
params, err := r.prepareQueryRange(ts)
|
||||
if err != nil {
|
||||
@ -344,7 +345,7 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, ts time.Time) (Vec
|
||||
r.lastTimestampWithDatapoints = time.Now()
|
||||
}
|
||||
|
||||
var resultVector Vector
|
||||
var resultVector ruletypes.Vector
|
||||
|
||||
// if the data is missing for `For` duration then we should send alert
|
||||
if r.ruleCondition.AlertOnAbsent && r.lastTimestampWithDatapoints.Add(time.Duration(r.Condition().AbsentFor)*time.Minute).Before(time.Now()) {
|
||||
@ -353,7 +354,7 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, ts time.Time) (Vec
|
||||
if !r.lastTimestampWithDatapoints.IsZero() {
|
||||
lbls.Set("lastSeen", r.lastTimestampWithDatapoints.Format(constants.AlertTimeFormat))
|
||||
}
|
||||
resultVector = append(resultVector, Sample{
|
||||
resultVector = append(resultVector, ruletypes.Sample{
|
||||
Metric: lbls.Labels(),
|
||||
IsMissing: true,
|
||||
})
|
||||
@ -384,7 +385,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (interface{}, er
|
||||
defer r.mtx.Unlock()
|
||||
|
||||
resultFPs := map[uint64]struct{}{}
|
||||
var alerts = make(map[uint64]*Alert, len(res))
|
||||
var alerts = make(map[uint64]*ruletypes.Alert, len(res))
|
||||
|
||||
for _, smpl := range res {
|
||||
l := make(map[string]string, len(smpl.Metric))
|
||||
@ -396,7 +397,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (interface{}, er
|
||||
threshold := valueFormatter.Format(r.targetVal(), r.Unit())
|
||||
zap.L().Debug("Alert template data for rule", zap.String("name", r.Name()), zap.String("formatter", valueFormatter.Name()), zap.String("value", value), zap.String("threshold", threshold))
|
||||
|
||||
tmplData := AlertTemplateData(l, value, threshold)
|
||||
tmplData := ruletypes.AlertTemplateData(l, value, threshold)
|
||||
// Inject some convenience variables that are easier to remember for users
|
||||
// who are not used to Go's templating system.
|
||||
defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}"
|
||||
@ -404,7 +405,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (interface{}, er
|
||||
// utility function to apply go template on labels and annotations
|
||||
expand := func(text string) string {
|
||||
|
||||
tmpl := NewTemplateExpander(
|
||||
tmpl := ruletypes.NewTemplateExpander(
|
||||
ctx,
|
||||
defs+text,
|
||||
"__alert_"+r.Name(),
|
||||
@ -442,13 +443,13 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (interface{}, er
|
||||
// Links with timestamps should go in annotations since labels
|
||||
// is used alert grouping, and we want to group alerts with the same
|
||||
// label set, but different timestamps, together.
|
||||
if r.typ == AlertTypeTraces {
|
||||
if r.typ == ruletypes.AlertTypeTraces {
|
||||
link := r.prepareLinksToTraces(ts, smpl.Metric)
|
||||
if link != "" && r.hostFromSource() != "" {
|
||||
zap.L().Info("adding traces link to annotations", zap.String("link", fmt.Sprintf("%s/traces-explorer?%s", r.hostFromSource(), link)))
|
||||
annotations = append(annotations, labels.Label{Name: "related_traces", Value: fmt.Sprintf("%s/traces-explorer?%s", r.hostFromSource(), link)})
|
||||
}
|
||||
} else if r.typ == AlertTypeLogs {
|
||||
} else if r.typ == ruletypes.AlertTypeLogs {
|
||||
link := r.prepareLinksToLogs(ts, smpl.Metric)
|
||||
if link != "" && r.hostFromSource() != "" {
|
||||
zap.L().Info("adding logs link to annotations", zap.String("link", fmt.Sprintf("%s/logs/logs-explorer?%s", r.hostFromSource(), link)))
|
||||
@ -466,7 +467,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (interface{}, er
|
||||
return nil, err
|
||||
}
|
||||
|
||||
alerts[h] = &Alert{
|
||||
alerts[h] = &ruletypes.Alert{
|
||||
Labels: lbs,
|
||||
QueryResultLables: resultLabels,
|
||||
Annotations: annotations,
|
||||
@ -507,7 +508,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (interface{}, er
|
||||
if _, ok := resultFPs[fp]; !ok {
|
||||
// If the alert was previously firing, keep it around for a given
|
||||
// retention time so it is reported as resolved to the AlertManager.
|
||||
if a.State == model.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > ResolvedRetention) {
|
||||
if a.State == model.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > ruletypes.ResolvedRetention) {
|
||||
delete(r.Active, fp)
|
||||
}
|
||||
if a.State != model.StateInactive {
|
||||
@ -558,7 +559,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (interface{}, er
|
||||
|
||||
r.RecordRuleStateHistory(ctx, prevState, currentState, itemsToAdd)
|
||||
|
||||
r.health = HealthGood
|
||||
r.health = ruletypes.HealthGood
|
||||
r.lastError = err
|
||||
|
||||
return len(r.Active), nil
|
||||
@ -566,10 +567,10 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (interface{}, er
|
||||
|
||||
func (r *ThresholdRule) String() string {
|
||||
|
||||
ar := PostableRule{
|
||||
ar := ruletypes.PostableRule{
|
||||
AlertName: r.name,
|
||||
RuleCondition: r.ruleCondition,
|
||||
EvalWindow: Duration(r.evalWindow),
|
||||
EvalWindow: ruletypes.Duration(r.evalWindow),
|
||||
Labels: r.labels.Map(),
|
||||
Annotations: r.annotations.Map(),
|
||||
PreferredChannels: r.preferredChannels,
|
||||
|
@ -15,6 +15,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/prometheus/prometheustest"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/common"
|
||||
@ -27,13 +28,13 @@ import (
|
||||
)
|
||||
|
||||
func TestThresholdRuleShouldAlert(t *testing.T) {
|
||||
postableRule := PostableRule{
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Tricky Condition Tests",
|
||||
AlertType: AlertTypeMetric,
|
||||
RuleType: RuleTypeThreshold,
|
||||
EvalWindow: Duration(5 * time.Minute),
|
||||
Frequency: Duration(1 * time.Minute),
|
||||
RuleCondition: &RuleCondition{
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
BuilderQueries: map[string]*v3.BuilderQuery{
|
||||
@ -796,8 +797,8 @@ func TestThresholdRuleShouldAlert(t *testing.T) {
|
||||
}
|
||||
|
||||
for idx, c := range cases {
|
||||
postableRule.RuleCondition.CompareOp = CompareOp(c.compareOp)
|
||||
postableRule.RuleCondition.MatchType = MatchType(c.matchType)
|
||||
postableRule.RuleCondition.CompareOp = ruletypes.CompareOp(c.compareOp)
|
||||
postableRule.RuleCondition.MatchType = ruletypes.MatchType(c.matchType)
|
||||
postableRule.RuleCondition.Target = &c.target
|
||||
|
||||
rule, err := NewThresholdRule("69", &postableRule, nil, true, true, WithEvalDelay(2*time.Minute))
|
||||
@ -859,13 +860,13 @@ func TestNormalizeLabelName(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestPrepareLinksToLogs(t *testing.T) {
|
||||
postableRule := PostableRule{
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Tricky Condition Tests",
|
||||
AlertType: AlertTypeLogs,
|
||||
RuleType: RuleTypeThreshold,
|
||||
EvalWindow: Duration(5 * time.Minute),
|
||||
Frequency: Duration(1 * time.Minute),
|
||||
RuleCondition: &RuleCondition{
|
||||
AlertType: ruletypes.AlertTypeLogs,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
BuilderQueries: map[string]*v3.BuilderQuery{
|
||||
@ -900,13 +901,13 @@ func TestPrepareLinksToLogs(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestPrepareLinksToTraces(t *testing.T) {
|
||||
postableRule := PostableRule{
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Links to traces test",
|
||||
AlertType: AlertTypeTraces,
|
||||
RuleType: RuleTypeThreshold,
|
||||
EvalWindow: Duration(5 * time.Minute),
|
||||
Frequency: Duration(1 * time.Minute),
|
||||
RuleCondition: &RuleCondition{
|
||||
AlertType: ruletypes.AlertTypeTraces,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
BuilderQueries: map[string]*v3.BuilderQuery{
|
||||
@ -941,13 +942,13 @@ func TestPrepareLinksToTraces(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestThresholdRuleLabelNormalization(t *testing.T) {
|
||||
postableRule := PostableRule{
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Tricky Condition Tests",
|
||||
AlertType: AlertTypeMetric,
|
||||
RuleType: RuleTypeThreshold,
|
||||
EvalWindow: Duration(5 * time.Minute),
|
||||
Frequency: Duration(1 * time.Minute),
|
||||
RuleCondition: &RuleCondition{
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
BuilderQueries: map[string]*v3.BuilderQuery{
|
||||
@ -1000,8 +1001,8 @@ func TestThresholdRuleLabelNormalization(t *testing.T) {
|
||||
}
|
||||
|
||||
for idx, c := range cases {
|
||||
postableRule.RuleCondition.CompareOp = CompareOp(c.compareOp)
|
||||
postableRule.RuleCondition.MatchType = MatchType(c.matchType)
|
||||
postableRule.RuleCondition.CompareOp = ruletypes.CompareOp(c.compareOp)
|
||||
postableRule.RuleCondition.MatchType = ruletypes.MatchType(c.matchType)
|
||||
postableRule.RuleCondition.Target = &c.target
|
||||
|
||||
rule, err := NewThresholdRule("69", &postableRule, nil, true, true, WithEvalDelay(2*time.Minute))
|
||||
@ -1024,13 +1025,13 @@ func TestThresholdRuleLabelNormalization(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestThresholdRuleEvalDelay(t *testing.T) {
|
||||
postableRule := PostableRule{
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Test Eval Delay",
|
||||
AlertType: AlertTypeMetric,
|
||||
RuleType: RuleTypeThreshold,
|
||||
EvalWindow: Duration(5 * time.Minute),
|
||||
Frequency: Duration(1 * time.Minute),
|
||||
RuleCondition: &RuleCondition{
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypeClickHouseSQL,
|
||||
ClickHouseQueries: map[string]*v3.ClickHouseQuery{
|
||||
@ -1072,13 +1073,13 @@ func TestThresholdRuleEvalDelay(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestThresholdRuleClickHouseTmpl(t *testing.T) {
|
||||
postableRule := PostableRule{
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Tricky Condition Tests",
|
||||
AlertType: AlertTypeMetric,
|
||||
RuleType: RuleTypeThreshold,
|
||||
EvalWindow: Duration(5 * time.Minute),
|
||||
Frequency: Duration(1 * time.Minute),
|
||||
RuleCondition: &RuleCondition{
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypeClickHouseSQL,
|
||||
ClickHouseQueries: map[string]*v3.ClickHouseQuery{
|
||||
@ -1127,13 +1128,13 @@ func (m *queryMatcherAny) Match(x string, y string) error {
|
||||
}
|
||||
|
||||
func TestThresholdRuleUnitCombinations(t *testing.T) {
|
||||
postableRule := PostableRule{
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Units test",
|
||||
AlertType: AlertTypeMetric,
|
||||
RuleType: RuleTypeThreshold,
|
||||
EvalWindow: Duration(5 * time.Minute),
|
||||
Frequency: Duration(1 * time.Minute),
|
||||
RuleCondition: &RuleCondition{
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
BuilderQueries: map[string]*v3.BuilderQuery{
|
||||
@ -1230,8 +1231,8 @@ func TestThresholdRuleUnitCombinations(t *testing.T) {
|
||||
telemetryStore.Mock().
|
||||
ExpectQuery(queryString).
|
||||
WillReturnRows(rows)
|
||||
postableRule.RuleCondition.CompareOp = CompareOp(c.compareOp)
|
||||
postableRule.RuleCondition.MatchType = MatchType(c.matchType)
|
||||
postableRule.RuleCondition.CompareOp = ruletypes.CompareOp(c.compareOp)
|
||||
postableRule.RuleCondition.MatchType = ruletypes.MatchType(c.matchType)
|
||||
postableRule.RuleCondition.Target = &c.target
|
||||
postableRule.RuleCondition.CompositeQuery.Unit = c.yAxisUnit
|
||||
postableRule.RuleCondition.TargetUnit = c.targetUnit
|
||||
@ -1276,13 +1277,13 @@ func TestThresholdRuleUnitCombinations(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestThresholdRuleNoData(t *testing.T) {
|
||||
postableRule := PostableRule{
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "No data test",
|
||||
AlertType: AlertTypeMetric,
|
||||
RuleType: RuleTypeThreshold,
|
||||
EvalWindow: Duration(5 * time.Minute),
|
||||
Frequency: Duration(1 * time.Minute),
|
||||
RuleCondition: &RuleCondition{
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
BuilderQueries: map[string]*v3.BuilderQuery{
|
||||
@ -1330,8 +1331,8 @@ func TestThresholdRuleNoData(t *testing.T) {
|
||||
ExpectQuery(queryString).
|
||||
WillReturnRows(rows)
|
||||
var target float64 = 0
|
||||
postableRule.RuleCondition.CompareOp = ValueIsEq
|
||||
postableRule.RuleCondition.MatchType = AtleastOnce
|
||||
postableRule.RuleCondition.CompareOp = ruletypes.ValueIsEq
|
||||
postableRule.RuleCondition.MatchType = ruletypes.AtleastOnce
|
||||
postableRule.RuleCondition.Target = &target
|
||||
postableRule.Annotations = map[string]string{
|
||||
"description": "This alert is fired when the defined metric (current value: {{$value}}) crosses the threshold ({{$threshold}})",
|
||||
@ -1368,13 +1369,13 @@ func TestThresholdRuleNoData(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestThresholdRuleTracesLink(t *testing.T) {
|
||||
postableRule := PostableRule{
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Traces link test",
|
||||
AlertType: AlertTypeTraces,
|
||||
RuleType: RuleTypeThreshold,
|
||||
EvalWindow: Duration(5 * time.Minute),
|
||||
Frequency: Duration(1 * time.Minute),
|
||||
RuleCondition: &RuleCondition{
|
||||
AlertType: ruletypes.AlertTypeTraces,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
BuilderQueries: map[string]*v3.BuilderQuery{
|
||||
@ -1432,8 +1433,8 @@ func TestThresholdRuleTracesLink(t *testing.T) {
|
||||
telemetryStore.Mock().
|
||||
ExpectQuery(queryString).
|
||||
WillReturnRows(rows)
|
||||
postableRule.RuleCondition.CompareOp = CompareOp(c.compareOp)
|
||||
postableRule.RuleCondition.MatchType = MatchType(c.matchType)
|
||||
postableRule.RuleCondition.CompareOp = ruletypes.CompareOp(c.compareOp)
|
||||
postableRule.RuleCondition.MatchType = ruletypes.MatchType(c.matchType)
|
||||
postableRule.RuleCondition.Target = &c.target
|
||||
postableRule.RuleCondition.CompositeQuery.Unit = c.yAxisUnit
|
||||
postableRule.RuleCondition.TargetUnit = c.targetUnit
|
||||
@ -1477,13 +1478,13 @@ func TestThresholdRuleTracesLink(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestThresholdRuleLogsLink(t *testing.T) {
|
||||
postableRule := PostableRule{
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Logs link test",
|
||||
AlertType: AlertTypeLogs,
|
||||
RuleType: RuleTypeThreshold,
|
||||
EvalWindow: Duration(5 * time.Minute),
|
||||
Frequency: Duration(1 * time.Minute),
|
||||
RuleCondition: &RuleCondition{
|
||||
AlertType: ruletypes.AlertTypeLogs,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
BuilderQueries: map[string]*v3.BuilderQuery{
|
||||
@ -1553,8 +1554,8 @@ func TestThresholdRuleLogsLink(t *testing.T) {
|
||||
telemetryStore.Mock().
|
||||
ExpectQuery(queryString).
|
||||
WillReturnRows(rows)
|
||||
postableRule.RuleCondition.CompareOp = CompareOp(c.compareOp)
|
||||
postableRule.RuleCondition.MatchType = MatchType(c.matchType)
|
||||
postableRule.RuleCondition.CompareOp = ruletypes.CompareOp(c.compareOp)
|
||||
postableRule.RuleCondition.MatchType = ruletypes.MatchType(c.matchType)
|
||||
postableRule.RuleCondition.Target = &c.target
|
||||
postableRule.RuleCondition.CompositeQuery.Unit = c.yAxisUnit
|
||||
postableRule.RuleCondition.TargetUnit = c.targetUnit
|
||||
@ -1599,13 +1600,13 @@ func TestThresholdRuleLogsLink(t *testing.T) {
|
||||
|
||||
func TestThresholdRuleShiftBy(t *testing.T) {
|
||||
target := float64(10)
|
||||
postableRule := PostableRule{
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Logs link test",
|
||||
AlertType: AlertTypeLogs,
|
||||
RuleType: RuleTypeThreshold,
|
||||
EvalWindow: Duration(5 * time.Minute),
|
||||
Frequency: Duration(1 * time.Minute),
|
||||
RuleCondition: &RuleCondition{
|
||||
AlertType: ruletypes.AlertTypeLogs,
|
||||
RuleType: ruletypes.RuleTypeThreshold,
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
BuilderQueries: map[string]*v3.BuilderQuery{
|
||||
@ -1638,7 +1639,7 @@ func TestThresholdRuleShiftBy(t *testing.T) {
|
||||
},
|
||||
},
|
||||
Target: &target,
|
||||
CompareOp: ValueAboveOrEq,
|
||||
CompareOp: ruletypes.ValueAboveOrEq,
|
||||
},
|
||||
}
|
||||
|
||||
|
pkg/ruler/rulestore/sqlrulestore/maintenance.go (new file, 231 lines)
@@ -0,0 +1,231 @@
|
||||
package sqlrulestore
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
type maintenance struct {
|
||||
sqlstore sqlstore.SQLStore
|
||||
}
|
||||
|
||||
func NewMaintenanceStore(store sqlstore.SQLStore) ruletypes.MaintenanceStore {
|
||||
return &maintenance{sqlstore: store}
|
||||
}
|
||||
|
||||
func (r *maintenance) GetAllPlannedMaintenance(ctx context.Context, orgID string) ([]*ruletypes.GettablePlannedMaintenance, error) {
|
||||
gettableMaintenancesRules := make([]*ruletypes.GettablePlannedMaintenanceRule, 0)
|
||||
err := r.sqlstore.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(&gettableMaintenancesRules).
|
||||
Relation("Rules").
|
||||
Where("org_id = ?", orgID).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
|
||||
gettablePlannedMaintenance := make([]*ruletypes.GettablePlannedMaintenance, 0)
|
||||
for _, gettableMaintenancesRule := range gettableMaintenancesRules {
|
||||
gettablePlannedMaintenance = append(gettablePlannedMaintenance, gettableMaintenancesRule.ConvertGettableMaintenanceRuleToGettableMaintenance())
|
||||
}
|
||||
|
||||
return gettablePlannedMaintenance, nil
|
||||
}
|
||||
|
||||
func (r *maintenance) GetPlannedMaintenanceByID(ctx context.Context, id valuer.UUID) (*ruletypes.GettablePlannedMaintenance, error) {
|
||||
storableMaintenanceRule := new(ruletypes.GettablePlannedMaintenanceRule)
|
||||
err := r.sqlstore.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(storableMaintenanceRule).
|
||||
Relation("Rules").
|
||||
Where("id = ?", id.StringValue()).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return storableMaintenanceRule.ConvertGettableMaintenanceRuleToGettableMaintenance(), nil
|
||||
}
|
||||
|
||||
func (r *maintenance) CreatePlannedMaintenance(ctx context.Context, maintenance ruletypes.GettablePlannedMaintenance) (valuer.UUID, error) {
|
||||
|
||||
claims, ok := authtypes.ClaimsFromContext(ctx)
|
||||
if !ok {
|
||||
return valuer.UUID{}, errors.New("no claims found in context")
|
||||
}
|
||||
|
||||
storablePlannedMaintenance := ruletypes.StorablePlannedMaintenance{
|
||||
Identifiable: types.Identifiable{
|
||||
ID: valuer.GenerateUUID(),
|
||||
},
|
||||
TimeAuditable: types.TimeAuditable{
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
},
|
||||
UserAuditable: types.UserAuditable{
|
||||
CreatedBy: claims.Email,
|
||||
UpdatedBy: claims.Email,
|
||||
},
|
||||
Name: maintenance.Name,
|
||||
Description: maintenance.Description,
|
||||
Schedule: maintenance.Schedule,
|
||||
OrgID: claims.OrgID,
|
||||
}
|
||||
|
||||
maintenanceRules := make([]*ruletypes.StorablePlannedMaintenanceRule, 0)
|
||||
for _, ruleIDStr := range maintenance.RuleIDs {
|
||||
ruleID, err := valuer.NewUUID(ruleIDStr)
|
||||
if err != nil {
|
||||
return valuer.UUID{}, err
|
||||
}
|
||||
|
||||
maintenanceRules = append(maintenanceRules, &ruletypes.StorablePlannedMaintenanceRule{
|
||||
Identifiable: types.Identifiable{
|
||||
ID: valuer.GenerateUUID(),
|
||||
},
|
||||
PlannedMaintenanceID: storablePlannedMaintenance.ID,
|
||||
RuleID: ruleID,
|
||||
})
|
||||
}
|
||||
|
||||
err := r.sqlstore.RunInTxCtx(ctx, nil, func(ctx context.Context) error {
|
||||
_, err := r.sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewInsert().
|
||||
Model(&storablePlannedMaintenance).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(maintenanceRules) > 0 {
|
||||
_, err = r.sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewInsert().
|
||||
Model(&maintenanceRules).
|
||||
Exec(ctx)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
}
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return valuer.UUID{}, err
|
||||
}
|
||||
|
||||
return storablePlannedMaintenance.ID, nil
|
||||
}
|
||||
|
||||
func (r *maintenance) DeletePlannedMaintenance(ctx context.Context, id valuer.UUID) error {
|
||||
_, err := r.sqlstore.
|
||||
BunDB().
|
||||
NewDelete().
|
||||
Model(new(ruletypes.StorablePlannedMaintenance)).
|
||||
Where("id = ?", id.StringValue()).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *maintenance) EditPlannedMaintenance(ctx context.Context, maintenance ruletypes.GettablePlannedMaintenance, id valuer.UUID) error {
|
||||
claims, ok := authtypes.ClaimsFromContext(ctx)
|
||||
if !ok {
|
||||
return errors.New("no claims found in context")
|
||||
}
|
||||
|
||||
storablePlannedMaintenance := ruletypes.StorablePlannedMaintenance{
|
||||
Identifiable: types.Identifiable{
|
||||
ID: id,
|
||||
},
|
||||
TimeAuditable: types.TimeAuditable{
|
||||
CreatedAt: maintenance.CreatedAt,
|
||||
UpdatedAt: time.Now(),
|
||||
},
|
||||
UserAuditable: types.UserAuditable{
|
||||
CreatedBy: maintenance.CreatedBy,
|
||||
UpdatedBy: claims.Email,
|
||||
},
|
||||
Name: maintenance.Name,
|
||||
Description: maintenance.Description,
|
||||
Schedule: maintenance.Schedule,
|
||||
OrgID: claims.OrgID,
|
||||
}
|
||||
|
||||
storablePlannedMaintenanceRules := make([]*ruletypes.StorablePlannedMaintenanceRule, 0)
|
||||
for _, ruleIDStr := range maintenance.RuleIDs {
|
||||
ruleID, err := valuer.NewUUID(ruleIDStr)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
storablePlannedMaintenanceRules = append(storablePlannedMaintenanceRules, &ruletypes.StorablePlannedMaintenanceRule{
|
||||
Identifiable: types.Identifiable{
|
||||
ID: valuer.GenerateUUID(),
|
||||
},
|
||||
RuleID: ruleID,
|
||||
PlannedMaintenanceID: storablePlannedMaintenance.ID,
|
||||
})
|
||||
}
|
||||
|
||||
err := r.sqlstore.RunInTxCtx(ctx, nil, func(ctx context.Context) error {
|
||||
_, err := r.sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewUpdate().
|
||||
Model(&storablePlannedMaintenance).
|
||||
Where("id = ?", storablePlannedMaintenance.ID.StringValue()).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = r.sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewDelete().
|
||||
Model(new(ruletypes.StorablePlannedMaintenanceRule)).
|
||||
Where("planned_maintenance_id = ?", storablePlannedMaintenance.ID.StringValue()).
|
||||
Exec(ctx)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(storablePlannedMaintenanceRules) > 0 {
|
||||
_, err = r.sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewInsert().
|
||||
Model(&storablePlannedMaintenanceRules).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
|
||||
})
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
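
For context, a minimal sketch (not part of this commit) of how the new org-scoped maintenance store could be consumed, following the same skip check the rule tasks above perform. The import path is assumed from the file location (pkg/ruler/rulestore/sqlrulestore), and a configured sqlstore.SQLStore is taken as given:

package rulerexample

import (
	"context"
	"fmt"
	"time"

	"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
	"github.com/SigNoz/signoz/pkg/sqlstore"
)

// shouldSkipRule mirrors the maintenance check done in the rule tasks: fetch
// the org's planned maintenance windows and ask each one whether the rule
// should be skipped at the given timestamp.
func shouldSkipRule(ctx context.Context, store sqlstore.SQLStore, orgID, ruleID string, ts time.Time) (bool, error) {
	maintenanceStore := sqlrulestore.NewMaintenanceStore(store)

	windows, err := maintenanceStore.GetAllPlannedMaintenance(ctx, orgID)
	if err != nil {
		return false, fmt.Errorf("fetching planned maintenance: %w", err)
	}

	for _, m := range windows {
		if m.ShouldSkip(ruleID, ts) {
			return true, nil
		}
	}
	return false, nil
}
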
pkg/ruler/rulestore/sqlrulestore/rule.go (new file, 275 lines)
@@ -0,0 +1,275 @@
|
||||
package sqlrulestore
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/jmoiron/sqlx"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
type rule struct {
|
||||
*sqlx.DB
|
||||
sqlstore sqlstore.SQLStore
|
||||
}
|
||||
|
||||
func NewRuleStore(db *sqlx.DB, store sqlstore.SQLStore) ruletypes.RuleStore {
|
||||
return &rule{sqlstore: store, DB: db}
|
||||
}
|
||||
|
||||
func (r *rule) CreateRule(ctx context.Context, storedRule *ruletypes.Rule, cb func(context.Context, valuer.UUID) error) (valuer.UUID, error) {
|
||||
err := r.sqlstore.RunInTxCtx(ctx, nil, func(ctx context.Context) error {
|
||||
_, err := r.sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewInsert().
|
||||
Model(storedRule).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return cb(ctx, storedRule.ID)
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return valuer.UUID{}, err
|
||||
}
|
||||
|
||||
return storedRule.ID, nil
|
||||
}
|
||||
|
||||
func (r *rule) EditRule(ctx context.Context, storedRule *ruletypes.Rule, cb func(context.Context) error) error {
|
||||
return r.sqlstore.RunInTxCtx(ctx, nil, func(ctx context.Context) error {
|
||||
_, err := r.sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewUpdate().
|
||||
Model(storedRule).
|
||||
Where("id = ?", storedRule.ID.StringValue()).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return cb(ctx)
|
||||
})
|
||||
}
|
||||
|
||||
func (r *rule) DeleteRule(ctx context.Context, id valuer.UUID, cb func(context.Context) error) error {
|
||||
if err := r.sqlstore.RunInTxCtx(ctx, nil, func(ctx context.Context) error {
|
||||
_, err := r.sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewDelete().
|
||||
Model(new(ruletypes.Rule)).
|
||||
Where("id = ?", id.StringValue()).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return cb(ctx)
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *rule) GetStoredRules(ctx context.Context, orgID string) ([]*ruletypes.Rule, error) {
|
||||
rules := make([]*ruletypes.Rule, 0)
|
||||
err := r.sqlstore.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(&rules).
|
||||
Where("org_id = ?", orgID).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return rules, err
|
||||
}
|
||||
|
||||
return rules, nil
|
||||
}
|
||||
|
||||
func (r *rule) GetStoredRule(ctx context.Context, id valuer.UUID) (*ruletypes.Rule, error) {
|
||||
rule := new(ruletypes.Rule)
|
||||
err := r.sqlstore.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(rule).
|
||||
Where("id = ?", id.StringValue()).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
return rule, nil
|
||||
}
|
||||
|
||||
func (r *rule) GetRuleUUID(ctx context.Context, ruleID int) (*ruletypes.RuleHistory, error) {
|
||||
ruleHistory := new(ruletypes.RuleHistory)
|
||||
err := r.sqlstore.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(ruleHistory).
|
||||
Where("rule_id = ?", ruleID).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
return ruleHistory, nil
|
||||
}
|
||||
|
||||
func (r *rule) ListOrgs(ctx context.Context) ([]string, error) {
|
||||
orgIDs := []string{}
|
||||
err := r.sqlstore.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
ColumnExpr("id").
|
||||
Model(new(types.Organization)).
|
||||
Scan(ctx, &orgIDs)
|
||||
if err != nil {
|
||||
return orgIDs, err
|
||||
}
|
||||
|
||||
return orgIDs, nil
|
||||
}
|
||||
|
||||
func (r *rule) getChannels() (*[]model.ChannelItem, *model.ApiError) {
|
||||
channels := []model.ChannelItem{}
|
||||
|
||||
query := "SELECT id, created_at, updated_at, name, type, data FROM notification_channels"
|
||||
|
||||
err := r.Select(&channels, query)
|
||||
|
||||
zap.L().Info(query)
|
||||
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return nil, &model.ApiError{Typ: model.ErrorInternal, Err: err}
|
||||
}
|
||||
|
||||
return &channels, nil
|
||||
}
|
||||
|
||||
func (r *rule) GetAlertsInfo(ctx context.Context) (*model.AlertsInfo, error) {
|
||||
alertsInfo := model.AlertsInfo{}
|
||||
// fetch alerts from rules db
|
||||
query := "SELECT data FROM rules"
|
||||
var alertsData []string
|
||||
var alertNames []string
|
||||
err := r.Select(&alertsData, query)
|
||||
if err != nil {
|
||||
zap.L().Error("Error in processing sql query", zap.Error(err))
|
||||
return &alertsInfo, err
|
||||
}
|
||||
for _, alert := range alertsData {
|
||||
var rule ruletypes.GettableRule
|
||||
if strings.Contains(alert, "time_series_v2") {
|
||||
alertsInfo.AlertsWithTSV2 = alertsInfo.AlertsWithTSV2 + 1
|
||||
}
|
||||
err = json.Unmarshal([]byte(alert), &rule)
|
||||
if err != nil {
|
||||
zap.L().Error("invalid rule data", zap.Error(err))
|
||||
continue
|
||||
}
|
||||
alertNames = append(alertNames, rule.AlertName)
|
||||
if rule.AlertType == ruletypes.AlertTypeLogs {
|
||||
alertsInfo.LogsBasedAlerts = alertsInfo.LogsBasedAlerts + 1
|
||||
|
||||
if rule.RuleCondition != nil && rule.RuleCondition.CompositeQuery != nil {
|
||||
if rule.RuleCondition.CompositeQuery.QueryType == v3.QueryTypeClickHouseSQL {
|
||||
if strings.Contains(alert, "signoz_logs.distributed_logs") ||
|
||||
strings.Contains(alert, "signoz_logs.logs") {
|
||||
alertsInfo.AlertsWithLogsChQuery = alertsInfo.AlertsWithLogsChQuery + 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, query := range rule.RuleCondition.CompositeQuery.BuilderQueries {
|
||||
if rule.RuleCondition.CompositeQuery.QueryType == v3.QueryTypeBuilder {
|
||||
if query.Filters != nil {
|
||||
for _, item := range query.Filters.Items {
|
||||
if slices.Contains([]string{"contains", "ncontains", "like", "nlike"}, string(item.Operator)) {
|
||||
if item.Key.Key != "body" {
|
||||
alertsInfo.AlertsWithLogsContainsOp += 1
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if rule.AlertType == ruletypes.AlertTypeMetric {
|
||||
alertsInfo.MetricBasedAlerts = alertsInfo.MetricBasedAlerts + 1
|
||||
if rule.RuleCondition != nil && rule.RuleCondition.CompositeQuery != nil {
|
||||
if rule.RuleCondition.CompositeQuery.QueryType == v3.QueryTypeBuilder {
|
||||
alertsInfo.MetricsBuilderQueries = alertsInfo.MetricsBuilderQueries + 1
|
||||
} else if rule.RuleCondition.CompositeQuery.QueryType == v3.QueryTypeClickHouseSQL {
|
||||
alertsInfo.MetricsClickHouseQueries = alertsInfo.MetricsClickHouseQueries + 1
|
||||
} else if rule.RuleCondition.CompositeQuery.QueryType == v3.QueryTypePromQL {
|
||||
alertsInfo.MetricsPrometheusQueries = alertsInfo.MetricsPrometheusQueries + 1
|
||||
for _, query := range rule.RuleCondition.CompositeQuery.PromQueries {
|
||||
if strings.Contains(query.Query, "signoz_") {
|
||||
alertsInfo.SpanMetricsPrometheusQueries = alertsInfo.SpanMetricsPrometheusQueries + 1
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if rule.RuleType == ruletypes.RuleTypeAnomaly {
|
||||
alertsInfo.AnomalyBasedAlerts = alertsInfo.AnomalyBasedAlerts + 1
|
||||
}
|
||||
} else if rule.AlertType == ruletypes.AlertTypeTraces {
|
||||
alertsInfo.TracesBasedAlerts = alertsInfo.TracesBasedAlerts + 1
|
||||
|
||||
if rule.RuleCondition != nil && rule.RuleCondition.CompositeQuery != nil {
|
||||
if rule.RuleCondition.CompositeQuery.QueryType == v3.QueryTypeClickHouseSQL {
|
||||
if strings.Contains(alert, "signoz_traces.distributed_signoz_index_v2") ||
|
||||
strings.Contains(alert, "signoz_traces.distributed_signoz_spans") ||
|
||||
strings.Contains(alert, "signoz_traces.distributed_signoz_error_index_v2") {
|
||||
alertsInfo.AlertsWithTraceChQuery = alertsInfo.AlertsWithTraceChQuery + 1
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
alertsInfo.TotalAlerts = alertsInfo.TotalAlerts + 1
|
||||
if !rule.PostableRule.Disabled {
|
||||
alertsInfo.TotalActiveAlerts = alertsInfo.TotalActiveAlerts + 1
|
||||
}
|
||||
}
|
||||
alertsInfo.AlertNames = alertNames
|
||||
|
||||
channels, _ := r.getChannels()
|
||||
if channels != nil {
|
||||
alertsInfo.TotalChannels = len(*channels)
|
||||
for _, channel := range *channels {
|
||||
if channel.Type == "slack" {
|
||||
alertsInfo.SlackChannels = alertsInfo.SlackChannels + 1
|
||||
}
|
||||
if channel.Type == "webhook" {
|
||||
alertsInfo.WebHookChannels = alertsInfo.WebHookChannels + 1
|
||||
}
|
||||
if channel.Type == "email" {
|
||||
alertsInfo.EmailChannels = alertsInfo.EmailChannels + 1
|
||||
}
|
||||
if channel.Type == "pagerduty" {
|
||||
alertsInfo.PagerDutyChannels = alertsInfo.PagerDutyChannels + 1
|
||||
}
|
||||
if channel.Type == "opsgenie" {
|
||||
alertsInfo.OpsGenieChannels = alertsInfo.OpsGenieChannels + 1
|
||||
}
|
||||
if channel.Type == "msteams" {
|
||||
alertsInfo.MSTeamsChannels = alertsInfo.MSTeamsChannels + 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return &alertsInfo, nil
|
||||
}
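
The new rule store runs each write and its caller-supplied callback inside one transaction. A hedged sketch of that contract, assuming ruletypes.RuleStore exposes CreateRule with the signature implemented above; startEvaluating is a hypothetical hook standing in for whatever the caller does with the new rule's UUID:

package rulerexample

import (
	"context"

	"github.com/SigNoz/signoz/pkg/types/ruletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"go.uber.org/zap"
)

// createAndStart shows the callback contract: the insert and the callback run
// in one transaction, so the rule only starts evaluating if the write commits.
func createAndStart(ctx context.Context, ruleStore ruletypes.RuleStore, storedRule *ruletypes.Rule, startEvaluating func(context.Context, valuer.UUID) error) error {
	id, err := ruleStore.CreateRule(ctx, storedRule, func(ctx context.Context, id valuer.UUID) error {
		// Returning an error here rolls the insert back as well.
		return startEvaluating(ctx, id)
	})
	if err != nil {
		return err
	}

	zap.L().Info("created rule", zap.String("id", id.StringValue()))
	return nil
}
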
|
@ -69,6 +69,7 @@ func NewSQLMigrationProviderFactories(sqlstore sqlstore.SQLStore) factory.NamedM
|
||||
sqlmigration.NewUpdatePreferencesFactory(sqlstore),
|
||||
sqlmigration.NewUpdateApdexTtlFactory(sqlstore),
|
||||
sqlmigration.NewUpdateResetPasswordFactory(sqlstore),
|
||||
sqlmigration.NewUpdateRulesFactory(sqlstore),
|
||||
sqlmigration.NewAddVirtualFieldsFactory(),
|
||||
sqlmigration.NewUpdateIntegrationsFactory(sqlstore),
|
||||
)
|
||||
|
pkg/sqlmigration/027_update_rules.go (new file, 348 lines)
@@ -0,0 +1,348 @@
|
||||
package sqlmigration
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"database/sql/driver"
|
||||
"encoding/json"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/uptrace/bun"
|
||||
"github.com/uptrace/bun/migrate"
|
||||
)
|
||||
|
||||
type updateRules struct {
|
||||
store sqlstore.SQLStore
|
||||
}
|
||||
|
||||
type AlertIds []string
|
||||
|
||||
func (a *AlertIds) Scan(src interface{}) error {
|
||||
if data, ok := src.([]byte); ok {
|
||||
return json.Unmarshal(data, a)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a *AlertIds) Value() (driver.Value, error) {
|
||||
return json.Marshal(a)
|
||||
}
|
||||
|
||||
type existingRule struct {
|
||||
bun.BaseModel `bun:"table:rules"`
|
||||
ID int `bun:"id,pk,autoincrement"`
|
||||
CreatedAt time.Time `bun:"created_at,type:datetime,notnull"`
|
||||
CreatedBy string `bun:"created_by,type:text,notnull"`
|
||||
UpdatedAt time.Time `bun:"updated_at,type:datetime,notnull"`
|
||||
UpdatedBy string `bun:"updated_by,type:text,notnull"`
|
||||
Deleted int `bun:"deleted,notnull,default:0"`
|
||||
Data string `bun:"data,type:text,notnull"`
|
||||
}
|
||||
|
||||
type newRule struct {
|
||||
bun.BaseModel `bun:"table:rule"`
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
types.UserAuditable
|
||||
Deleted int `bun:"deleted,notnull,default:0"`
|
||||
Data string `bun:"data,type:text,notnull"`
|
||||
OrgID string `bun:"org_id,type:text"`
|
||||
}
|
||||
|
||||
type existingMaintenance struct {
|
||||
bun.BaseModel `bun:"table:planned_maintenance"`
|
||||
ID int `bun:"id,pk,autoincrement"`
|
||||
Name string `bun:"name,type:text,notnull"`
|
||||
Description string `bun:"description,type:text"`
|
||||
AlertIDs *AlertIds `bun:"alert_ids,type:text"`
|
||||
Schedule *ruletypes.Schedule `bun:"schedule,type:text,notnull"`
|
||||
CreatedAt time.Time `bun:"created_at,type:datetime,notnull"`
|
||||
CreatedBy string `bun:"created_by,type:text,notnull"`
|
||||
UpdatedAt time.Time `bun:"updated_at,type:datetime,notnull"`
|
||||
UpdatedBy string `bun:"updated_by,type:text,notnull"`
|
||||
}
|
||||
|
||||
type newMaintenance struct {
|
||||
bun.BaseModel `bun:"table:planned_maintenance_new"`
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
types.UserAuditable
|
||||
Name string `bun:"name,type:text,notnull"`
|
||||
Description string `bun:"description,type:text"`
|
||||
Schedule *ruletypes.Schedule `bun:"schedule,type:text,notnull"`
|
||||
OrgID string `bun:"org_id,type:text"`
|
||||
}
|
||||
|
||||
type storablePlannedMaintenanceRule struct {
|
||||
bun.BaseModel `bun:"table:planned_maintenance_rule"`
|
||||
types.Identifiable
|
||||
PlannedMaintenanceID valuer.UUID `bun:"planned_maintenance_id,type:text"`
|
||||
RuleID valuer.UUID `bun:"rule_id,type:text"`
|
||||
}
|
||||
|
||||
type ruleHistory struct {
|
||||
bun.BaseModel `bun:"table:rule_history"`
|
||||
RuleID int `bun:"rule_id"`
|
||||
RuleUUID valuer.UUID `bun:"rule_uuid"`
|
||||
}
|
||||
|
||||
func NewUpdateRulesFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
|
||||
return factory.
|
||||
NewProviderFactory(
|
||||
factory.MustNewName("update_rules"),
|
||||
func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
|
||||
return newUpdateRules(ctx, ps, c, sqlstore)
|
||||
})
|
||||
}
|
||||
|
||||
func newUpdateRules(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
|
||||
return &updateRules{store: store}, nil
|
||||
}
|
||||
|
||||
func (migration *updateRules) Register(migrations *migrate.Migrations) error {
|
||||
if err := migrations.
|
||||
Register(migration.Up, migration.Down); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (migration *updateRules) Up(ctx context.Context, db *bun.DB) error {
|
||||
tx, err := db.
|
||||
BeginTx(ctx, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
defer tx.Rollback()
|
||||
|
||||
ruleIDToRuleUUIDMap := map[int]valuer.UUID{}
|
||||
err = migration.
|
||||
store.
|
||||
Dialect().
|
||||
RenameTableAndModifyModel(ctx, tx, new(existingRule), new(newRule), []string{OrgReference}, func(ctx context.Context) error {
|
||||
existingRules := make([]*existingRule, 0)
|
||||
err := tx.
|
||||
NewSelect().
|
||||
Model(&existingRules).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
if err != sql.ErrNoRows {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if err == nil && len(existingRules) > 0 {
|
||||
var orgID string
|
||||
err := migration.
|
||||
store.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model((*types.Organization)(nil)).
|
||||
Column("id").
|
||||
Scan(ctx, &orgID)
|
||||
if err != nil {
|
||||
if err != sql.ErrNoRows {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if err == nil {
|
||||
newRules, idUUIDMap := migration.CopyExistingRulesToNewRules(existingRules, orgID)
|
||||
ruleIDToRuleUUIDMap = idUUIDMap
|
||||
_, err = tx.
|
||||
NewInsert().
|
||||
Model(&newRules).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
err = migration.store.Dialect().UpdatePrimaryKey(ctx, tx, new(existingMaintenance), new(newMaintenance), OrgReference, func(ctx context.Context) error {
|
||||
_, err := tx.
|
||||
NewCreateTable().
|
||||
IfNotExists().
|
||||
Model(new(storablePlannedMaintenanceRule)).
|
||||
ForeignKey(`("planned_maintenance_id") REFERENCES "planned_maintenance_new" ("id") ON DELETE CASCADE ON UPDATE CASCADE`).
|
||||
ForeignKey(`("rule_id") REFERENCES "rule" ("id")`).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
existingMaintenances := make([]*existingMaintenance, 0)
|
||||
err = tx.
|
||||
NewSelect().
|
||||
Model(&existingMaintenances).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
if err != sql.ErrNoRows {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if err == nil && len(existingMaintenances) > 0 {
|
||||
var orgID string
|
||||
err := migration.
|
||||
store.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model((*types.Organization)(nil)).
|
||||
Column("id").
|
||||
Scan(ctx, &orgID)
|
||||
if err != nil {
|
||||
if err != sql.ErrNoRows {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if err == nil {
|
||||
newMaintenances, newMaintenancesRules, err := migration.CopyExistingMaintenancesToNewMaintenancesAndRules(existingMaintenances, orgID, ruleIDToRuleUUIDMap)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = tx.
|
||||
NewInsert().
|
||||
Model(&newMaintenances).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(newMaintenancesRules) > 0 {
|
||||
_, err = tx.
|
||||
NewInsert().
|
||||
Model(&newMaintenancesRules).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ruleHistories := make([]*ruleHistory, 0)
|
||||
for ruleID, ruleUUID := range ruleIDToRuleUUIDMap {
|
||||
ruleHistories = append(ruleHistories, &ruleHistory{
|
||||
RuleID: ruleID,
|
||||
RuleUUID: ruleUUID,
|
||||
})
|
||||
}
|
||||
|
||||
_, err = tx.
|
||||
NewCreateTable().
|
||||
IfNotExists().
|
||||
Model(&ruleHistories).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if len(ruleHistories) > 0 {
|
||||
_, err = tx.
|
||||
NewInsert().
|
||||
Model(&ruleHistories).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = tx.Commit()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (migration *updateRules) Down(context.Context, *bun.DB) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (migration *updateRules) CopyExistingRulesToNewRules(existingRules []*existingRule, orgID string) ([]*newRule, map[int]valuer.UUID) {
|
||||
newRules := make([]*newRule, 0)
|
||||
idUUIDMap := map[int]valuer.UUID{}
|
||||
for _, rule := range existingRules {
|
||||
uuid := valuer.GenerateUUID()
|
||||
idUUIDMap[rule.ID] = uuid
|
||||
newRules = append(newRules, &newRule{
|
||||
Identifiable: types.Identifiable{
|
||||
ID: uuid,
|
||||
},
|
||||
TimeAuditable: types.TimeAuditable{
|
||||
CreatedAt: rule.CreatedAt,
|
||||
UpdatedAt: rule.UpdatedAt,
|
||||
},
|
||||
UserAuditable: types.UserAuditable{
|
||||
CreatedBy: rule.CreatedBy,
|
||||
UpdatedBy: rule.UpdatedBy,
|
||||
},
|
||||
Deleted: rule.Deleted,
|
||||
Data: rule.Data,
|
||||
OrgID: orgID,
|
||||
})
|
||||
}
|
||||
return newRules, idUUIDMap
|
||||
}
|
||||
|
||||
func (migration *updateRules) CopyExistingMaintenancesToNewMaintenancesAndRules(existingMaintenances []*existingMaintenance, orgID string, ruleIDToRuleUUIDMap map[int]valuer.UUID) ([]*newMaintenance, []*storablePlannedMaintenanceRule, error) {
|
||||
newMaintenances := make([]*newMaintenance, 0)
|
||||
newMaintenanceRules := make([]*storablePlannedMaintenanceRule, 0)
|
||||
|
||||
for _, maintenance := range existingMaintenances {
|
||||
ruleIDs := maintenance.AlertIDs
|
||||
maintenanceUUID := valuer.GenerateUUID()
|
||||
newMaintenance := newMaintenance{
|
||||
Identifiable: types.Identifiable{
|
||||
ID: maintenanceUUID,
|
||||
},
|
||||
TimeAuditable: types.TimeAuditable{
|
||||
CreatedAt: maintenance.CreatedAt,
|
||||
UpdatedAt: maintenance.UpdatedAt,
|
||||
},
|
||||
UserAuditable: types.UserAuditable{
|
||||
CreatedBy: maintenance.CreatedBy,
|
||||
UpdatedBy: maintenance.UpdatedBy,
|
||||
},
|
||||
Name: maintenance.Name,
|
||||
Description: maintenance.Description,
|
||||
Schedule: maintenance.Schedule,
|
||||
OrgID: orgID,
|
||||
}
|
||||
newMaintenances = append(newMaintenances, &newMaintenance)
|
||||
for _, ruleIDStr := range *ruleIDs {
|
||||
ruleID, err := strconv.Atoi(ruleIDStr)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
newMaintenanceRules = append(newMaintenanceRules, &storablePlannedMaintenanceRule{
|
||||
Identifiable: types.Identifiable{
|
||||
ID: valuer.GenerateUUID(),
|
||||
},
|
||||
PlannedMaintenanceID: maintenanceUUID,
|
||||
RuleID: ruleIDToRuleUUIDMap[ruleID],
|
||||
})
|
||||
}
|
||||
}
|
||||
return newMaintenances, newMaintenanceRules, nil
|
||||
}
|
@ -18,7 +18,8 @@ func FromGlobs(paths []string) (*alertmanagertemplate.Template, error) {
|
||||
}
|
||||
|
||||
if err := t.Parse(bytes.NewReader([]byte(`
|
||||
{{ define "__ruleIdPath" }}{{ range .CommonLabels.SortedPairs }}{{ if eq .Name "ruleId" }}{{ if match "^[0-9]+$" .Value }}/edit?ruleId={{ .Value | urlquery }}{{ end }}{{ end }}{{ end }}{{ end }}
|
||||
|
||||
{{ define "__ruleIdPath" }}{{ range .CommonLabels.SortedPairs }}{{ if eq .Name "ruleId" }}{{ if match "^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$" .Value }}/edit?ruleId={{ .Value | urlquery }}{{ end }}{{ end }}{{ end }}{{ end }}
|
||||
{{ define "__alertmanagerURL" }}{{ .ExternalURL }}/alerts{{ template "__ruleIdPath" . }}{{ end }}
|
||||
`))); err != nil {
|
||||
return nil, fmt.Errorf("error parsing alertmanager templates: %w", err)
|
||||
|
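The template change above swaps the numeric-only ruleId check for a UUID pattern. A small standalone check of that same pattern, using the rule IDs that appear in the tests below; the regexp package here stands in for the template's match function.

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Pattern copied from the new __ruleIdPath template definition.
	uuidRe := regexp.MustCompile(`^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$`)

	fmt.Println(uuidRe.MatchString("01961575-461c-7668-875f-05d374062bfc")) // true: new UUID rule IDs
	fmt.Println(uuidRe.MatchString("439453587"))                            // false: legacy integer IDs no longer match
}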
@ -27,14 +27,29 @@ func TestFromGlobs(t *testing.T) {
|
||||
{
|
||||
Alert: model.Alert{
|
||||
Labels: model.LabelSet{
|
||||
"ruleId": "439453587",
|
||||
"ruleId": "01961575-461c-7668-875f-05d374062bfc",
|
||||
},
|
||||
},
|
||||
UpdatedAt: time.Now(),
|
||||
Timeout: false,
|
||||
},
|
||||
},
|
||||
expected: "http://localhost:8080/alerts/edit?ruleId=439453587",
|
||||
expected: "http://localhost:8080/alerts/edit?ruleId=01961575-461c-7668-875f-05d374062bfc",
|
||||
},
|
||||
{
|
||||
name: "SingleAlertWithValidRuleUUIDv4",
|
||||
alerts: []*types.Alert{
|
||||
{
|
||||
Alert: model.Alert{
|
||||
Labels: model.LabelSet{
|
||||
"ruleId": "2d8edca5-4f24-4266-afd1-28cefadcfa88",
|
||||
},
|
||||
},
|
||||
UpdatedAt: time.Now(),
|
||||
Timeout: false,
|
||||
},
|
||||
},
|
||||
expected: "http://localhost:8080/alerts/edit?ruleId=2d8edca5-4f24-4266-afd1-28cefadcfa88",
|
||||
},
|
||||
{
|
||||
name: "SingleAlertWithInvalidRuleId",
|
||||
@ -57,7 +72,7 @@ func TestFromGlobs(t *testing.T) {
|
||||
{
|
||||
Alert: model.Alert{
|
||||
Labels: model.LabelSet{
|
||||
"ruleId": "1",
|
||||
"ruleId": "01961575-461c-7668-875f-05d374062bfc",
|
||||
},
|
||||
},
|
||||
UpdatedAt: time.Now(),
|
||||
@ -66,7 +81,7 @@ func TestFromGlobs(t *testing.T) {
|
||||
{
|
||||
Alert: model.Alert{
|
||||
Labels: model.LabelSet{
|
||||
"ruleId": "2",
|
||||
"ruleId": "0196156c-990e-7ec5-b28f-8a3cfbb9c865",
|
||||
},
|
||||
},
|
||||
UpdatedAt: time.Now(),
|
||||
@ -81,7 +96,7 @@ func TestFromGlobs(t *testing.T) {
|
||||
{
|
||||
Alert: model.Alert{
|
||||
Labels: model.LabelSet{
|
||||
"ruleId": "1",
|
||||
"ruleId": "01961575-461c-7668-875f-05d374062bfc",
|
||||
},
|
||||
},
|
||||
UpdatedAt: time.Now(),
|
||||
@ -90,14 +105,14 @@ func TestFromGlobs(t *testing.T) {
|
||||
{
|
||||
Alert: model.Alert{
|
||||
Labels: model.LabelSet{
|
||||
"ruleId": "1",
|
||||
"ruleId": "01961575-461c-7668-875f-05d374062bfc",
|
||||
},
|
||||
},
|
||||
UpdatedAt: time.Now(),
|
||||
Timeout: false,
|
||||
},
|
||||
},
|
||||
expected: "http://localhost:8080/alerts/edit?ruleId=1",
|
||||
expected: "http://localhost:8080/alerts/edit?ruleId=01961575-461c-7668-875f-05d374062bfc",
|
||||
},
|
||||
{
|
||||
name: "MultipleAlertsWithNoRuleId",
|
||||
|
@ -5,7 +5,6 @@ import (
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gosimple/slug"
|
||||
"github.com/uptrace/bun"
|
||||
@ -68,32 +67,6 @@ func (c *DashboardData) Scan(src interface{}) error {
|
||||
return json.Unmarshal(data, c)
|
||||
}
|
||||
|
||||
type Rule struct {
|
||||
bun.BaseModel `bun:"table:rules"`
|
||||
|
||||
ID int `bun:"id,pk,autoincrement"`
|
||||
CreatedAt time.Time `bun:"created_at,type:datetime,notnull"`
|
||||
CreatedBy string `bun:"created_by,type:text,notnull"`
|
||||
UpdatedAt time.Time `bun:"updated_at,type:datetime,notnull"`
|
||||
UpdatedBy string `bun:"updated_by,type:text,notnull"`
|
||||
Deleted int `bun:"deleted,notnull,default:0"`
|
||||
Data string `bun:"data,type:text,notnull"`
|
||||
}
|
||||
|
||||
type PlannedMaintenance struct {
|
||||
bun.BaseModel `bun:"table:planned_maintenance"`
|
||||
|
||||
ID int `bun:"id,pk,autoincrement"`
|
||||
Name string `bun:"name,type:text,notnull"`
|
||||
Description string `bun:"description,type:text"`
|
||||
AlertIDs string `bun:"alert_ids,type:text"`
|
||||
Schedule string `bun:"schedule,type:text,notnull"`
|
||||
CreatedAt time.Time `bun:"created_at,type:datetime,notnull"`
|
||||
CreatedBy string `bun:"created_by,type:text,notnull"`
|
||||
UpdatedAt time.Time `bun:"updated_at,type:datetime,notnull"`
|
||||
UpdatedBy string `bun:"updated_by,type:text,notnull"`
|
||||
}
|
||||
|
||||
type TTLSetting struct {
|
||||
bun.BaseModel `bun:"table:ttl_setting"`
|
||||
Identifiable
|
||||
|
@ -1,4 +1,4 @@
|
||||
package rules
|
||||
package ruletypes
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
@ -11,7 +11,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// this file contains common structs and methods used by
|
||||
@ -20,8 +19,7 @@ import (
|
||||
const (
|
||||
// how long before re-sending the alert
|
||||
ResolvedRetention = 15 * time.Minute
|
||||
|
||||
TestAlertPostFix = "_TEST_ALERT"
|
||||
TestAlertPostFix = "_TEST_ALERT"
|
||||
)
|
||||
|
||||
type RuleType string
|
||||
@ -63,7 +61,7 @@ type Alert struct {
|
||||
Missing bool
|
||||
}
|
||||
|
||||
func (a *Alert) needsSending(ts time.Time, resendDelay time.Duration) bool {
|
||||
func (a *Alert) NeedsSending(ts time.Time, resendDelay time.Duration) bool {
|
||||
if a.State == model.StatePending {
|
||||
return false
|
||||
}
|
||||
@ -201,39 +199,11 @@ func (rc *RuleCondition) String() string {
|
||||
return string(data)
|
||||
}
|
||||
|
||||
type Duration time.Duration
|
||||
|
||||
func (d Duration) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(time.Duration(d).String())
|
||||
}
|
||||
|
||||
func (d *Duration) UnmarshalJSON(b []byte) error {
|
||||
var v interface{}
|
||||
if err := json.Unmarshal(b, &v); err != nil {
|
||||
return err
|
||||
}
|
||||
switch value := v.(type) {
|
||||
case float64:
|
||||
*d = Duration(time.Duration(value))
|
||||
return nil
|
||||
case string:
|
||||
tmp, err := time.ParseDuration(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*d = Duration(tmp)
|
||||
|
||||
return nil
|
||||
default:
|
||||
return errors.New("invalid duration")
|
||||
}
|
||||
}
|
||||
|
||||
// prepareRuleGeneratorURL creates an appropriate url
|
||||
// for the rule. the URL is sent in slack messages as well as
|
||||
// to other systems and allows backtracking to the rule definition
|
||||
// from the third party systems.
|
||||
func prepareRuleGeneratorURL(ruleId string, source string) string {
|
||||
func PrepareRuleGeneratorURL(ruleId string, source string) string {
|
||||
if source == "" {
|
||||
return source
|
||||
}
|
@ -1,4 +1,4 @@
|
||||
package rules
|
||||
package ruletypes
|
||||
|
||||
import (
|
||||
"context"
|
||||
@ -70,16 +70,16 @@ type PostableRule struct {
|
||||
}
|
||||
|
||||
func ParsePostableRule(content []byte) (*PostableRule, error) {
|
||||
return parsePostableRule(content, "json")
|
||||
return ParsePostableRuleWithKind(content, "json")
|
||||
}
|
||||
|
||||
func parsePostableRule(content []byte, kind RuleDataKind) (*PostableRule, error) {
|
||||
return parseIntoRule(PostableRule{}, content, kind)
|
||||
func ParsePostableRuleWithKind(content []byte, kind RuleDataKind) (*PostableRule, error) {
|
||||
return ParseIntoRule(PostableRule{}, content, kind)
|
||||
}
|
||||
|
||||
// parseIntoRule loads the content (data) into PostableRule and also
|
||||
// validates the end result
|
||||
func parseIntoRule(initRule PostableRule, content []byte, kind RuleDataKind) (*PostableRule, error) {
|
||||
func ParseIntoRule(initRule PostableRule, content []byte, kind RuleDataKind) (*PostableRule, error) {
|
||||
rule := &initRule
|
||||
|
||||
var err error
|
@ -1,4 +1,4 @@
|
||||
package rules
|
||||
package ruletypes
|
||||
|
||||
import (
|
||||
"testing"
|
@ -1,237 +1,71 @@
|
||||
package rules
|
||||
package ruletypes
|
||||
|
||||
import (
|
||||
"database/sql/driver"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"time"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/uptrace/bun"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrMissingName = errors.New("missing name")
|
||||
ErrMissingSchedule = errors.New("missing schedule")
|
||||
ErrMissingTimezone = errors.New("missing timezone")
|
||||
ErrMissingRepeatType = errors.New("missing repeat type")
|
||||
ErrMissingDuration = errors.New("missing duration")
|
||||
ErrCodeInvalidPlannedMaintenancePayload = errors.MustNewCode("invalid_planned_maintenance_payload")
|
||||
)
|
||||
|
||||
type PlannedMaintenance struct {
|
||||
Id int64 `json:"id" db:"id"`
|
||||
Name string `json:"name" db:"name"`
|
||||
Description string `json:"description" db:"description"`
|
||||
Schedule *Schedule `json:"schedule" db:"schedule"`
|
||||
AlertIds *AlertIds `json:"alertIds" db:"alert_ids"`
|
||||
CreatedAt time.Time `json:"createdAt" db:"created_at"`
|
||||
CreatedBy string `json:"createdBy" db:"created_by"`
|
||||
UpdatedAt time.Time `json:"updatedAt" db:"updated_at"`
|
||||
UpdatedBy string `json:"updatedBy" db:"updated_by"`
|
||||
type StorablePlannedMaintenance struct {
|
||||
bun.BaseModel `bun:"table:planned_maintenance"`
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
types.UserAuditable
|
||||
Name string `bun:"name,type:text,notnull"`
|
||||
Description string `bun:"description,type:text"`
|
||||
Schedule *Schedule `bun:"schedule,type:text,notnull"`
|
||||
OrgID string `bun:"org_id,type:text"`
|
||||
}
|
||||
|
||||
type GettablePlannedMaintenance struct {
|
||||
Id string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Description string `json:"description"`
|
||||
Schedule *Schedule `json:"schedule"`
|
||||
RuleIDs []string `json:"alertIds"`
|
||||
CreatedAt time.Time `json:"createdAt"`
|
||||
CreatedBy string `json:"createdBy"`
|
||||
UpdatedAt time.Time `json:"updatedAt"`
|
||||
UpdatedBy string `json:"updatedBy"`
|
||||
Status string `json:"status"`
|
||||
Kind string `json:"kind"`
|
||||
}
|
||||
|
||||
type AlertIds []string
|
||||
|
||||
func (a *AlertIds) Scan(src interface{}) error {
|
||||
if data, ok := src.([]byte); ok {
|
||||
return json.Unmarshal(data, a)
|
||||
}
|
||||
return nil
|
||||
type StorablePlannedMaintenanceRule struct {
|
||||
bun.BaseModel `bun:"table:planned_maintenance_rule"`
|
||||
types.Identifiable
|
||||
PlannedMaintenanceID valuer.UUID `bun:"planned_maintenance_id,type:text"`
|
||||
RuleID valuer.UUID `bun:"rule_id,type:text"`
|
||||
}
|
||||
|
||||
func (a *AlertIds) Value() (driver.Value, error) {
|
||||
return json.Marshal(a)
|
||||
type GettablePlannedMaintenanceRule struct {
|
||||
*StorablePlannedMaintenance `bun:",extend"`
|
||||
Rules []*StorablePlannedMaintenanceRule `bun:"rel:has-many,join:id=planned_maintenance_id"`
|
||||
}
|
||||
|
||||
type Schedule struct {
|
||||
Timezone string `json:"timezone"`
|
||||
StartTime time.Time `json:"startTime,omitempty"`
|
||||
EndTime time.Time `json:"endTime,omitempty"`
|
||||
Recurrence *Recurrence `json:"recurrence"`
|
||||
}
|
||||
|
||||
func (s *Schedule) Scan(src interface{}) error {
|
||||
if data, ok := src.([]byte); ok {
|
||||
return json.Unmarshal(data, s)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Schedule) Value() (driver.Value, error) {
|
||||
return json.Marshal(s)
|
||||
}
|
||||
|
||||
type RepeatType string
|
||||
|
||||
const (
|
||||
RepeatTypeDaily RepeatType = "daily"
|
||||
RepeatTypeWeekly RepeatType = "weekly"
|
||||
RepeatTypeMonthly RepeatType = "monthly"
|
||||
)
|
||||
|
||||
type RepeatOn string
|
||||
|
||||
const (
|
||||
RepeatOnSunday RepeatOn = "sunday"
|
||||
RepeatOnMonday RepeatOn = "monday"
|
||||
RepeatOnTuesday RepeatOn = "tuesday"
|
||||
RepeatOnWednesday RepeatOn = "wednesday"
|
||||
RepeatOnThursday RepeatOn = "thursday"
|
||||
RepeatOnFriday RepeatOn = "friday"
|
||||
RepeatOnSaturday RepeatOn = "saturday"
|
||||
)
|
||||
|
||||
var RepeatOnAllMap = map[RepeatOn]time.Weekday{
|
||||
RepeatOnSunday: time.Sunday,
|
||||
RepeatOnMonday: time.Monday,
|
||||
RepeatOnTuesday: time.Tuesday,
|
||||
RepeatOnWednesday: time.Wednesday,
|
||||
RepeatOnThursday: time.Thursday,
|
||||
RepeatOnFriday: time.Friday,
|
||||
RepeatOnSaturday: time.Saturday,
|
||||
}
|
||||
|
||||
type Recurrence struct {
|
||||
StartTime time.Time `json:"startTime"`
|
||||
EndTime *time.Time `json:"endTime,omitempty"`
|
||||
Duration Duration `json:"duration"`
|
||||
RepeatType RepeatType `json:"repeatType"`
|
||||
RepeatOn []RepeatOn `json:"repeatOn"`
|
||||
}
|
||||
|
||||
func (r *Recurrence) Scan(src interface{}) error {
|
||||
if data, ok := src.([]byte); ok {
|
||||
return json.Unmarshal(data, r)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *Recurrence) Value() (driver.Value, error) {
|
||||
return json.Marshal(r)
|
||||
}
|
||||
|
||||
func (s Schedule) MarshalJSON() ([]byte, error) {
|
||||
loc, err := time.LoadLocation(s.Timezone)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var startTime, endTime time.Time
|
||||
if !s.StartTime.IsZero() {
|
||||
startTime = time.Date(s.StartTime.Year(), s.StartTime.Month(), s.StartTime.Day(), s.StartTime.Hour(), s.StartTime.Minute(), s.StartTime.Second(), s.StartTime.Nanosecond(), loc)
|
||||
}
|
||||
if !s.EndTime.IsZero() {
|
||||
endTime = time.Date(s.EndTime.Year(), s.EndTime.Month(), s.EndTime.Day(), s.EndTime.Hour(), s.EndTime.Minute(), s.EndTime.Second(), s.EndTime.Nanosecond(), loc)
|
||||
}
|
||||
|
||||
var recurrence *Recurrence
|
||||
if s.Recurrence != nil {
|
||||
recStartTime := time.Date(s.Recurrence.StartTime.Year(), s.Recurrence.StartTime.Month(), s.Recurrence.StartTime.Day(), s.Recurrence.StartTime.Hour(), s.Recurrence.StartTime.Minute(), s.Recurrence.StartTime.Second(), s.Recurrence.StartTime.Nanosecond(), loc)
|
||||
var recEndTime *time.Time
|
||||
if s.Recurrence.EndTime != nil {
|
||||
end := time.Date(s.Recurrence.EndTime.Year(), s.Recurrence.EndTime.Month(), s.Recurrence.EndTime.Day(), s.Recurrence.EndTime.Hour(), s.Recurrence.EndTime.Minute(), s.Recurrence.EndTime.Second(), s.Recurrence.EndTime.Nanosecond(), loc)
|
||||
recEndTime = &end
|
||||
}
|
||||
recurrence = &Recurrence{
|
||||
StartTime: recStartTime,
|
||||
EndTime: recEndTime,
|
||||
Duration: s.Recurrence.Duration,
|
||||
RepeatType: s.Recurrence.RepeatType,
|
||||
RepeatOn: s.Recurrence.RepeatOn,
|
||||
}
|
||||
}
|
||||
|
||||
return json.Marshal(&struct {
|
||||
Timezone string `json:"timezone"`
|
||||
StartTime string `json:"startTime"`
|
||||
EndTime string `json:"endTime"`
|
||||
Recurrence *Recurrence `json:"recurrence,omitempty"`
|
||||
}{
|
||||
Timezone: s.Timezone,
|
||||
StartTime: startTime.Format(time.RFC3339),
|
||||
EndTime: endTime.Format(time.RFC3339),
|
||||
Recurrence: recurrence,
|
||||
})
|
||||
}
|
||||
|
||||
func (s *Schedule) UnmarshalJSON(data []byte) error {
|
||||
aux := &struct {
|
||||
Timezone string `json:"timezone"`
|
||||
StartTime string `json:"startTime"`
|
||||
EndTime string `json:"endTime"`
|
||||
Recurrence *Recurrence `json:"recurrence,omitempty"`
|
||||
}{}
|
||||
if err := json.Unmarshal(data, aux); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
loc, err := time.LoadLocation(aux.Timezone)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var startTime time.Time
|
||||
if aux.StartTime != "" {
|
||||
startTime, err = time.Parse(time.RFC3339, aux.StartTime)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
s.StartTime = time.Date(startTime.Year(), startTime.Month(), startTime.Day(), startTime.Hour(), startTime.Minute(), startTime.Second(), startTime.Nanosecond(), loc)
|
||||
}
|
||||
|
||||
var endTime time.Time
|
||||
if aux.EndTime != "" {
|
||||
endTime, err = time.Parse(time.RFC3339, aux.EndTime)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
s.EndTime = time.Date(endTime.Year(), endTime.Month(), endTime.Day(), endTime.Hour(), endTime.Minute(), endTime.Second(), endTime.Nanosecond(), loc)
|
||||
}
|
||||
|
||||
s.Timezone = aux.Timezone
|
||||
|
||||
if aux.Recurrence != nil {
|
||||
recStartTime, err := time.Parse(time.RFC3339, aux.Recurrence.StartTime.Format(time.RFC3339))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var recEndTime *time.Time
|
||||
if aux.Recurrence.EndTime != nil {
|
||||
end, err := time.Parse(time.RFC3339, aux.Recurrence.EndTime.Format(time.RFC3339))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
endConverted := time.Date(end.Year(), end.Month(), end.Day(), end.Hour(), end.Minute(), end.Second(), end.Nanosecond(), loc)
|
||||
recEndTime = &endConverted
|
||||
}
|
||||
|
||||
s.Recurrence = &Recurrence{
|
||||
StartTime: time.Date(recStartTime.Year(), recStartTime.Month(), recStartTime.Day(), recStartTime.Hour(), recStartTime.Minute(), recStartTime.Second(), recStartTime.Nanosecond(), loc),
|
||||
EndTime: recEndTime,
|
||||
Duration: aux.Recurrence.Duration,
|
||||
RepeatType: aux.Recurrence.RepeatType,
|
||||
RepeatOn: aux.Recurrence.RepeatOn,
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *PlannedMaintenance) shouldSkip(ruleID string, now time.Time) bool {
|
||||
func (m *GettablePlannedMaintenance) ShouldSkip(ruleID string, now time.Time) bool {
|
||||
// Check if the alert ID is in the maintenance window
|
||||
found := false
|
||||
if m.AlertIds != nil {
|
||||
for _, alertID := range *m.AlertIds {
|
||||
if len(m.RuleIDs) > 0 {
|
||||
for _, alertID := range m.RuleIDs {
|
||||
if alertID == ruleID {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If no alert ids, then skip all alerts
|
||||
if m.AlertIds == nil || len(*m.AlertIds) == 0 {
|
||||
if len(m.RuleIDs) == 0 {
|
||||
found = true
|
||||
}
|
||||
|
||||
@ -240,7 +74,6 @@ func (m *PlannedMaintenance) shouldSkip(ruleID string, now time.Time) bool {
|
||||
}
|
||||
|
||||
zap.L().Info("alert found in maintenance", zap.String("alert", ruleID), zap.String("maintenance", m.Name))
|
||||
|
||||
// If alert is found, we check if it should be skipped based on the schedule
|
||||
loc, err := time.LoadLocation(m.Schedule.Timezone)
|
||||
if err != nil {
|
||||
@ -312,7 +145,7 @@ func (m *PlannedMaintenance) shouldSkip(ruleID string, now time.Time) bool {
|
||||
|
||||
// checkDaily rebases the recurrence start to today (or yesterday if needed)
|
||||
// and returns true if currentTime is within [candidate, candidate+Duration].
|
||||
func (m *PlannedMaintenance) checkDaily(currentTime time.Time, rec *Recurrence, loc *time.Location) bool {
|
||||
func (m *GettablePlannedMaintenance) checkDaily(currentTime time.Time, rec *Recurrence, loc *time.Location) bool {
|
||||
candidate := time.Date(
|
||||
currentTime.Year(), currentTime.Month(), currentTime.Day(),
|
||||
rec.StartTime.Hour(), rec.StartTime.Minute(), 0, 0,
|
||||
@ -327,7 +160,7 @@ func (m *PlannedMaintenance) checkDaily(currentTime time.Time, rec *Recurrence,
|
||||
// checkWeekly finds the most recent allowed occurrence by rebasing the recurrence’s
|
||||
// time-of-day onto the allowed weekday. It does this for each allowed day and returns true
|
||||
// if the current time falls within the candidate window.
|
||||
func (m *PlannedMaintenance) checkWeekly(currentTime time.Time, rec *Recurrence, loc *time.Location) bool {
|
||||
func (m *GettablePlannedMaintenance) checkWeekly(currentTime time.Time, rec *Recurrence, loc *time.Location) bool {
|
||||
// If no days specified, treat as every day (like daily).
|
||||
if len(rec.RepeatOn) == 0 {
|
||||
return m.checkDaily(currentTime, rec, loc)
|
||||
@ -359,7 +192,7 @@ func (m *PlannedMaintenance) checkWeekly(currentTime time.Time, rec *Recurrence,
|
||||
|
||||
// checkMonthly rebases the candidate occurrence using the recurrence's day-of-month.
|
||||
// If the candidate for the current month is in the future, it uses the previous month.
|
||||
func (m *PlannedMaintenance) checkMonthly(currentTime time.Time, rec *Recurrence, loc *time.Location) bool {
|
||||
func (m *GettablePlannedMaintenance) checkMonthly(currentTime time.Time, rec *Recurrence, loc *time.Location) bool {
|
||||
refDay := rec.StartTime.Day()
|
||||
year, month, _ := currentTime.Date()
|
||||
lastDay := time.Date(year, month+1, 0, 0, 0, 0, 0, loc).Day()
|
||||
@ -391,15 +224,15 @@ func (m *PlannedMaintenance) checkMonthly(currentTime time.Time, rec *Recurrence
|
||||
return currentTime.Sub(candidate) <= time.Duration(rec.Duration)
|
||||
}
|
||||
|
||||
func (m *PlannedMaintenance) IsActive(now time.Time) bool {
|
||||
func (m *GettablePlannedMaintenance) IsActive(now time.Time) bool {
|
||||
ruleID := "maintenance"
|
||||
if m.AlertIds != nil && len(*m.AlertIds) > 0 {
|
||||
ruleID = (*m.AlertIds)[0]
|
||||
if len(m.RuleIDs) > 0 {
|
||||
ruleID = (m.RuleIDs)[0]
|
||||
}
|
||||
return m.shouldSkip(ruleID, now)
|
||||
return m.ShouldSkip(ruleID, now)
|
||||
}
|
||||
|
||||
func (m *PlannedMaintenance) IsUpcoming() bool {
|
||||
func (m *GettablePlannedMaintenance) IsUpcoming() bool {
|
||||
loc, err := time.LoadLocation(m.Schedule.Timezone)
|
||||
if err != nil {
|
||||
// handle error appropriately, for example log and return false or fallback to UTC
|
||||
@ -417,47 +250,47 @@ func (m *PlannedMaintenance) IsUpcoming() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (m *PlannedMaintenance) IsRecurring() bool {
|
||||
func (m *GettablePlannedMaintenance) IsRecurring() bool {
|
||||
return m.Schedule.Recurrence != nil
|
||||
}
|
||||
|
||||
func (m *PlannedMaintenance) Validate() error {
|
||||
func (m *GettablePlannedMaintenance) Validate() error {
|
||||
if m.Name == "" {
|
||||
return ErrMissingName
|
||||
return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidPlannedMaintenancePayload, "missing name in the payload")
|
||||
}
|
||||
if m.Schedule == nil {
|
||||
return ErrMissingSchedule
|
||||
return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidPlannedMaintenancePayload, "missing schedule in the payload")
|
||||
}
|
||||
if m.Schedule.Timezone == "" {
|
||||
return ErrMissingTimezone
|
||||
return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidPlannedMaintenancePayload, "missing timezone in the payload")
|
||||
}
|
||||
|
||||
_, err := time.LoadLocation(m.Schedule.Timezone)
|
||||
if err != nil {
|
||||
return errors.New("invalid timezone")
|
||||
return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidPlannedMaintenancePayload, "invalid timezone in the payload")
|
||||
}
|
||||
|
||||
if !m.Schedule.StartTime.IsZero() && !m.Schedule.EndTime.IsZero() {
|
||||
if m.Schedule.StartTime.After(m.Schedule.EndTime) {
|
||||
return errors.New("start time cannot be after end time")
|
||||
return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidPlannedMaintenancePayload, "start time cannot be after end time")
|
||||
}
|
||||
}
|
||||
|
||||
if m.Schedule.Recurrence != nil {
|
||||
if m.Schedule.Recurrence.RepeatType == "" {
|
||||
return ErrMissingRepeatType
|
||||
return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidPlannedMaintenancePayload, "missing repeat type in the payload")
|
||||
}
|
||||
if m.Schedule.Recurrence.Duration == 0 {
|
||||
return ErrMissingDuration
|
||||
return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidPlannedMaintenancePayload, "missing duration in the payload")
|
||||
}
|
||||
if m.Schedule.Recurrence.EndTime != nil && m.Schedule.Recurrence.EndTime.Before(m.Schedule.Recurrence.StartTime) {
|
||||
return errors.New("end time cannot be before start time")
|
||||
return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidPlannedMaintenancePayload, "end time cannot be before start time")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m PlannedMaintenance) MarshalJSON() ([]byte, error) {
|
||||
func (m GettablePlannedMaintenance) MarshalJSON() ([]byte, error) {
|
||||
now := time.Now().In(time.FixedZone(m.Schedule.Timezone, 0))
|
||||
var status string
|
||||
if m.IsActive(now) {
|
||||
@ -476,11 +309,11 @@ func (m PlannedMaintenance) MarshalJSON() ([]byte, error) {
|
||||
}
|
||||
|
||||
return json.Marshal(struct {
|
||||
Id int64 `json:"id" db:"id"`
|
||||
Id string `json:"id" db:"id"`
|
||||
Name string `json:"name" db:"name"`
|
||||
Description string `json:"description" db:"description"`
|
||||
Schedule *Schedule `json:"schedule" db:"schedule"`
|
||||
AlertIds *AlertIds `json:"alertIds" db:"alert_ids"`
|
||||
AlertIds []string `json:"alertIds" db:"alert_ids"`
|
||||
CreatedAt time.Time `json:"createdAt" db:"created_at"`
|
||||
CreatedBy string `json:"createdBy" db:"created_by"`
|
||||
UpdatedAt time.Time `json:"updatedAt" db:"updated_at"`
|
||||
@ -492,7 +325,7 @@ func (m PlannedMaintenance) MarshalJSON() ([]byte, error) {
|
||||
Name: m.Name,
|
||||
Description: m.Description,
|
||||
Schedule: m.Schedule,
|
||||
AlertIds: m.AlertIds,
|
||||
AlertIds: m.RuleIDs,
|
||||
CreatedAt: m.CreatedAt,
|
||||
CreatedBy: m.CreatedBy,
|
||||
UpdatedAt: m.UpdatedAt,
|
||||
@ -501,3 +334,32 @@ func (m PlannedMaintenance) MarshalJSON() ([]byte, error) {
|
||||
Kind: kind,
|
||||
})
|
||||
}
|
||||
|
||||
func (m *GettablePlannedMaintenanceRule) ConvertGettableMaintenanceRuleToGettableMaintenance() *GettablePlannedMaintenance {
|
||||
ruleIDs := []string{}
|
||||
if m.Rules != nil {
|
||||
for _, storableMaintenanceRule := range m.Rules {
|
||||
ruleIDs = append(ruleIDs, storableMaintenanceRule.RuleID.StringValue())
|
||||
}
|
||||
}
|
||||
|
||||
return &GettablePlannedMaintenance{
|
||||
Id: m.ID.StringValue(),
|
||||
Name: m.Name,
|
||||
Description: m.Description,
|
||||
Schedule: m.Schedule,
|
||||
RuleIDs: ruleIDs,
|
||||
CreatedAt: m.CreatedAt,
|
||||
UpdatedAt: m.UpdatedAt,
|
||||
CreatedBy: m.CreatedBy,
|
||||
UpdatedBy: m.UpdatedBy,
|
||||
}
|
||||
}
|
||||
|
||||
type MaintenanceStore interface {
|
||||
CreatePlannedMaintenance(context.Context, GettablePlannedMaintenance) (valuer.UUID, error)
|
||||
DeletePlannedMaintenance(context.Context, valuer.UUID) error
|
||||
GetPlannedMaintenanceByID(context.Context, valuer.UUID) (*GettablePlannedMaintenance, error)
|
||||
EditPlannedMaintenance(context.Context, GettablePlannedMaintenance, valuer.UUID) error
|
||||
GetAllPlannedMaintenance(context.Context, string) ([]*GettablePlannedMaintenance, error)
|
||||
}
|
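With the split into planned_maintenance and planned_maintenance_rule, a window is read back through GettablePlannedMaintenanceRule and its has-many Rules relation, then converted to the API shape. A hedged sketch of what a MaintenanceStore implementation might do; the package, db handle, and query shape are illustrative, not the store shipped in this PR.

package maintenancesketch // hypothetical package, for illustration only

import (
	"context"

	ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/uptrace/bun"
)

func getPlannedMaintenanceByID(ctx context.Context, db bun.IDB, id valuer.UUID) (*ruletypes.GettablePlannedMaintenance, error) {
	storable := &ruletypes.GettablePlannedMaintenanceRule{
		StorablePlannedMaintenance: new(ruletypes.StorablePlannedMaintenance),
	}
	err := db.NewSelect().
		Model(storable).
		Relation("Rules"). // populated via the has-many join on planned_maintenance_id
		Where("id = ?", id.StringValue()).
		Scan(ctx)
	if err != nil {
		return nil, err
	}
	return storable.ConvertGettableMaintenanceRuleToGettableMaintenance(), nil
}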
@ -1,4 +1,4 @@
|
||||
package rules
|
||||
package ruletypes
|
||||
|
||||
import (
|
||||
"testing"
|
||||
@ -14,13 +14,13 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
|
||||
cases := []struct {
|
||||
name string
|
||||
maintenance *PlannedMaintenance
|
||||
maintenance *GettablePlannedMaintenance
|
||||
ts time.Time
|
||||
skip bool
|
||||
}{
|
||||
{
|
||||
name: "only-on-saturday",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "Europe/London",
|
||||
Recurrence: &Recurrence{
|
||||
@ -37,7 +37,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
// Testing weekly recurrence with midnight crossing
|
||||
{
|
||||
name: "weekly-across-midnight-previous-day",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -54,7 +54,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
// Testing weekly recurrence with midnight crossing
|
||||
{
|
||||
name: "weekly-across-midnight-previous-day",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -71,7 +71,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
// Testing weekly recurrence with multi day duration
|
||||
{
|
||||
name: "weekly-across-midnight-previous-day",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -88,7 +88,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
// Weekly recurrence where the previous day is not in RepeatOn
|
||||
{
|
||||
name: "weekly-across-midnight-previous-day-not-in-repeaton",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -105,7 +105,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
// Daily recurrence with midnight crossing
|
||||
{
|
||||
name: "daily-maintenance-across-midnight",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -121,7 +121,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
// Exactly at start time boundary
|
||||
{
|
||||
name: "at-start-time-boundary",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -137,7 +137,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
// Exactly at end time boundary
|
||||
{
|
||||
name: "at-end-time-boundary",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -153,7 +153,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
// Monthly maintenance with multi-day duration
|
||||
{
|
||||
name: "monthly-multi-day-duration",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -169,7 +169,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
// Weekly maintenance with multi-day duration
|
||||
{
|
||||
name: "weekly-multi-day-duration",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -186,7 +186,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
// Monthly maintenance that crosses to next month
|
||||
{
|
||||
name: "monthly-crosses-to-next-month",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -202,7 +202,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
// Different timezone tests
|
||||
{
|
||||
name: "timezone-offset-test",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "America/New_York", // UTC-5 or UTC-4 depending on DST
|
||||
Recurrence: &Recurrence{
|
||||
@ -218,7 +218,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
// Test negative case - time well outside window
|
||||
{
|
||||
name: "daily-maintenance-time-outside-window",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -234,7 +234,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
// Test for recurring maintenance with an end date that is before the current time
|
||||
{
|
||||
name: "recurring-maintenance-with-past-end-date",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -251,7 +251,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
// Monthly recurring maintenance spanning end of month into beginning of next month
|
||||
{
|
||||
name: "monthly-maintenance-spans-month-end",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -267,7 +267,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
// Test for RepeatOn with empty array (should apply to all days)
|
||||
{
|
||||
name: "weekly-empty-repeaton",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -284,7 +284,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
// February has fewer days than January - test the edge case when maintenance is on 31st
|
||||
{
|
||||
name: "monthly-maintenance-february-fewer-days",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -299,7 +299,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "daily-maintenance-crosses-midnight",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -314,7 +314,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "monthly-maintenance-crosses-month-end",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -329,7 +329,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "monthly-maintenance-crosses-month-end-and-duration-is-2-days",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -344,7 +344,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "weekly-maintenance-crosses-midnight",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -360,7 +360,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "monthly-maintenance-crosses-month-end-and-duration-is-2-days",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -375,7 +375,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "daily-maintenance-crosses-midnight",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -390,7 +390,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "monthly-maintenance-crosses-month-end-and-duration-is-2-hours",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -405,7 +405,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "fixed planned maintenance start <= ts <= end",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
StartTime: time.Now().UTC().Add(-time.Hour),
|
||||
@ -417,7 +417,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "fixed planned maintenance start >= ts",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
StartTime: time.Now().UTC().Add(time.Hour),
|
||||
@ -429,7 +429,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "fixed planned maintenance ts < start",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
StartTime: time.Now().UTC().Add(time.Hour),
|
||||
@ -441,7 +441,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "recurring maintenance, repeat sunday, saturday, weekly for 24 hours, in Us/Eastern timezone",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "US/Eastern",
|
||||
Recurrence: &Recurrence{
|
||||
@ -457,7 +457,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "recurring maintenance, repeat daily from 12:00 to 14:00",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -472,7 +472,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "recurring maintenance, repeat daily from 12:00 to 14:00",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -487,7 +487,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "recurring maintenance, repeat daily from 12:00 to 14:00",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -502,7 +502,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "recurring maintenance, repeat weekly on monday from 12:00 to 14:00",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -518,7 +518,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "recurring maintenance, repeat weekly on monday from 12:00 to 14:00",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -534,7 +534,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "recurring maintenance, repeat weekly on monday from 12:00 to 14:00",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -550,7 +550,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "recurring maintenance, repeat weekly on monday from 12:00 to 14:00",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -566,7 +566,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "recurring maintenance, repeat weekly on monday from 12:00 to 14:00",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -582,7 +582,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "recurring maintenance, repeat monthly on 4th from 12:00 to 14:00",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -597,7 +597,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "recurring maintenance, repeat monthly on 4th from 12:00 to 14:00",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -612,7 +612,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "recurring maintenance, repeat monthly on 4th from 12:00 to 14:00",
|
||||
maintenance: &PlannedMaintenance{
|
||||
maintenance: &GettablePlannedMaintenance{
|
||||
Schedule: &Schedule{
|
||||
Timezone: "UTC",
|
||||
Recurrence: &Recurrence{
|
||||
@ -628,7 +628,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
|
||||
}
|
||||
|
||||
for idx, c := range cases {
|
||||
result := c.maintenance.shouldSkip(c.name, c.ts)
|
||||
result := c.maintenance.ShouldSkip(c.name, c.ts)
|
||||
if result != c.skip {
|
||||
t.Errorf("skip %v, got %v, case:%d - %s", c.skip, result, idx, c.name)
|
||||
}
|
192
pkg/types/ruletypes/recurrence.go
Normal file
@ -0,0 +1,192 @@
|
||||
package ruletypes
|
||||
|
||||
import (
|
||||
"database/sql/driver"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"time"
|
||||
)
|
||||
|
||||
type RepeatType string
|
||||
|
||||
const (
|
||||
RepeatTypeDaily RepeatType = "daily"
|
||||
RepeatTypeWeekly RepeatType = "weekly"
|
||||
RepeatTypeMonthly RepeatType = "monthly"
|
||||
)
|
||||
|
||||
type RepeatOn string
|
||||
|
||||
const (
|
||||
RepeatOnSunday RepeatOn = "sunday"
|
||||
RepeatOnMonday RepeatOn = "monday"
|
||||
RepeatOnTuesday RepeatOn = "tuesday"
|
||||
RepeatOnWednesday RepeatOn = "wednesday"
|
||||
RepeatOnThursday RepeatOn = "thursday"
|
||||
RepeatOnFriday RepeatOn = "friday"
|
||||
RepeatOnSaturday RepeatOn = "saturday"
|
||||
)
|
||||
|
||||
var RepeatOnAllMap = map[RepeatOn]time.Weekday{
|
||||
RepeatOnSunday: time.Sunday,
|
||||
RepeatOnMonday: time.Monday,
|
||||
RepeatOnTuesday: time.Tuesday,
|
||||
RepeatOnWednesday: time.Wednesday,
|
||||
RepeatOnThursday: time.Thursday,
|
||||
RepeatOnFriday: time.Friday,
|
||||
RepeatOnSaturday: time.Saturday,
|
||||
}
|
||||
|
||||
type Duration time.Duration
|
||||
|
||||
func (d Duration) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(time.Duration(d).String())
|
||||
}
|
||||
|
||||
func (d *Duration) UnmarshalJSON(b []byte) error {
|
||||
var v interface{}
|
||||
if err := json.Unmarshal(b, &v); err != nil {
|
||||
return err
|
||||
}
|
||||
switch value := v.(type) {
|
||||
case float64:
|
||||
*d = Duration(time.Duration(value))
|
||||
return nil
|
||||
case string:
|
||||
tmp, err := time.ParseDuration(value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*d = Duration(tmp)
|
||||
|
||||
return nil
|
||||
default:
|
||||
return errors.New("invalid duration")
|
||||
}
|
||||
}
|
||||
|
||||
type Recurrence struct {
|
||||
StartTime time.Time `json:"startTime"`
|
||||
EndTime *time.Time `json:"endTime,omitempty"`
|
||||
Duration Duration `json:"duration"`
|
||||
RepeatType RepeatType `json:"repeatType"`
|
||||
RepeatOn []RepeatOn `json:"repeatOn"`
|
||||
}
|
||||
|
||||
func (r *Recurrence) Scan(src interface{}) error {
|
||||
if data, ok := src.([]byte); ok {
|
||||
return json.Unmarshal(data, r)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *Recurrence) Value() (driver.Value, error) {
|
||||
return json.Marshal(r)
|
||||
}
|
||||
|
||||
func (s Schedule) MarshalJSON() ([]byte, error) {
|
||||
loc, err := time.LoadLocation(s.Timezone)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var startTime, endTime time.Time
|
||||
if !s.StartTime.IsZero() {
|
||||
startTime = time.Date(s.StartTime.Year(), s.StartTime.Month(), s.StartTime.Day(), s.StartTime.Hour(), s.StartTime.Minute(), s.StartTime.Second(), s.StartTime.Nanosecond(), loc)
|
||||
}
|
||||
if !s.EndTime.IsZero() {
|
||||
endTime = time.Date(s.EndTime.Year(), s.EndTime.Month(), s.EndTime.Day(), s.EndTime.Hour(), s.EndTime.Minute(), s.EndTime.Second(), s.EndTime.Nanosecond(), loc)
|
||||
}
|
||||
|
||||
var recurrence *Recurrence
|
||||
if s.Recurrence != nil {
|
||||
recStartTime := time.Date(s.Recurrence.StartTime.Year(), s.Recurrence.StartTime.Month(), s.Recurrence.StartTime.Day(), s.Recurrence.StartTime.Hour(), s.Recurrence.StartTime.Minute(), s.Recurrence.StartTime.Second(), s.Recurrence.StartTime.Nanosecond(), loc)
|
||||
var recEndTime *time.Time
|
||||
if s.Recurrence.EndTime != nil {
|
||||
end := time.Date(s.Recurrence.EndTime.Year(), s.Recurrence.EndTime.Month(), s.Recurrence.EndTime.Day(), s.Recurrence.EndTime.Hour(), s.Recurrence.EndTime.Minute(), s.Recurrence.EndTime.Second(), s.Recurrence.EndTime.Nanosecond(), loc)
|
||||
recEndTime = &end
|
||||
}
|
||||
recurrence = &Recurrence{
|
||||
StartTime: recStartTime,
|
||||
EndTime: recEndTime,
|
||||
Duration: s.Recurrence.Duration,
|
||||
RepeatType: s.Recurrence.RepeatType,
|
||||
RepeatOn: s.Recurrence.RepeatOn,
|
||||
}
|
||||
}
|
||||
|
||||
return json.Marshal(&struct {
|
||||
Timezone string `json:"timezone"`
|
||||
StartTime string `json:"startTime"`
|
||||
EndTime string `json:"endTime"`
|
||||
Recurrence *Recurrence `json:"recurrence,omitempty"`
|
||||
}{
|
||||
Timezone: s.Timezone,
|
||||
StartTime: startTime.Format(time.RFC3339),
|
||||
EndTime: endTime.Format(time.RFC3339),
|
||||
Recurrence: recurrence,
|
||||
})
|
||||
}
|
||||
|
||||
func (s *Schedule) UnmarshalJSON(data []byte) error {
|
||||
aux := &struct {
|
||||
Timezone string `json:"timezone"`
|
||||
StartTime string `json:"startTime"`
|
||||
EndTime string `json:"endTime"`
|
||||
Recurrence *Recurrence `json:"recurrence,omitempty"`
|
||||
}{}
|
||||
if err := json.Unmarshal(data, aux); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
loc, err := time.LoadLocation(aux.Timezone)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var startTime time.Time
|
||||
if aux.StartTime != "" {
|
||||
startTime, err = time.Parse(time.RFC3339, aux.StartTime)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
s.StartTime = time.Date(startTime.Year(), startTime.Month(), startTime.Day(), startTime.Hour(), startTime.Minute(), startTime.Second(), startTime.Nanosecond(), loc)
|
||||
}
|
||||
|
||||
var endTime time.Time
|
||||
if aux.EndTime != "" {
|
||||
endTime, err = time.Parse(time.RFC3339, aux.EndTime)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
s.EndTime = time.Date(endTime.Year(), endTime.Month(), endTime.Day(), endTime.Hour(), endTime.Minute(), endTime.Second(), endTime.Nanosecond(), loc)
|
||||
}
|
||||
|
||||
s.Timezone = aux.Timezone
|
||||
|
||||
if aux.Recurrence != nil {
|
||||
recStartTime, err := time.Parse(time.RFC3339, aux.Recurrence.StartTime.Format(time.RFC3339))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var recEndTime *time.Time
|
||||
if aux.Recurrence.EndTime != nil {
|
||||
end, err := time.Parse(time.RFC3339, aux.Recurrence.EndTime.Format(time.RFC3339))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
endConverted := time.Date(end.Year(), end.Month(), end.Day(), end.Hour(), end.Minute(), end.Second(), end.Nanosecond(), loc)
|
||||
recEndTime = &endConverted
|
||||
}
|
||||
|
||||
s.Recurrence = &Recurrence{
|
||||
StartTime: time.Date(recStartTime.Year(), recStartTime.Month(), recStartTime.Day(), recStartTime.Hour(), recStartTime.Minute(), recStartTime.Second(), recStartTime.Nanosecond(), loc),
|
||||
EndTime: recEndTime,
|
||||
Duration: aux.Recurrence.Duration,
|
||||
RepeatType: aux.Recurrence.RepeatType,
|
||||
RepeatOn: aux.Recurrence.RepeatOn,
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
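A short hedged round trip over the types above: Duration accepts either a Go duration string or a number of nanoseconds, and Schedule re-emits startTime/endTime in RFC3339 rebased into the configured timezone. The payload values are illustrative.

package main

import (
	"encoding/json"
	"fmt"
	"time"

	ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
)

func main() {
	// Duration: string form ...
	var d ruletypes.Duration
	if err := json.Unmarshal([]byte(`"2h"`), &d); err != nil {
		panic(err)
	}
	fmt.Println(time.Duration(d)) // 2h0m0s

	// ... and numeric form (nanoseconds).
	if err := json.Unmarshal([]byte(`7200000000000`), &d); err != nil {
		panic(err)
	}
	fmt.Println(time.Duration(d)) // 2h0m0s

	// Schedule: times are parsed as RFC3339 and rebased into the given timezone.
	var s ruletypes.Schedule
	payload := []byte(`{"timezone":"UTC","startTime":"2025-04-01T12:00:00Z","endTime":"2025-04-01T14:00:00Z"}`)
	if err := json.Unmarshal(payload, &s); err != nil {
		panic(err)
	}
	out, _ := json.Marshal(s)
	fmt.Println(string(out))
}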
@ -1,4 +1,4 @@
|
||||
package rules
|
||||
package ruletypes
|
||||
|
||||
import (
|
||||
"encoding/json"
|
37
pkg/types/ruletypes/rule.go
Normal file
@ -0,0 +1,37 @@
|
||||
package ruletypes
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
type Rule struct {
|
||||
bun.BaseModel `bun:"table:rule"`
|
||||
types.Identifiable
|
||||
types.TimeAuditable
|
||||
types.UserAuditable
|
||||
Deleted int `bun:"deleted,notnull,default:0"`
|
||||
Data string `bun:"data,type:text,notnull"`
|
||||
OrgID string `bun:"org_id,type:text"`
|
||||
}
|
||||
|
||||
type RuleHistory struct {
|
||||
bun.BaseModel `bun:"table:rule_history"`
|
||||
RuleID int `bun:"rule_id"`
|
||||
RuleUUID valuer.UUID `bun:"rule_uuid"`
|
||||
}
|
||||
|
||||
type RuleStore interface {
|
||||
CreateRule(context.Context, *Rule, func(context.Context, valuer.UUID) error) (valuer.UUID, error)
|
||||
EditRule(context.Context, *Rule, func(context.Context) error) error
|
||||
DeleteRule(context.Context, valuer.UUID, func(context.Context) error) error
|
||||
GetStoredRules(context.Context, string) ([]*Rule, error)
|
||||
GetStoredRule(context.Context, valuer.UUID) (*Rule, error)
|
||||
GetRuleUUID(context.Context, int) (*RuleHistory, error)
|
||||
GetAlertsInfo(context.Context) (*model.AlertsInfo, error)
|
||||
ListOrgs(context.Context) ([]string, error)
|
||||
}
|
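RuleHistory preserves the mapping from the old integer rule IDs to the new UUIDs, so callers that still hold a numeric reference can resolve it through GetRuleUUID. A hedged sketch of such a caller; the helper, store variable, and base URL are hypothetical, and the link shape simply mirrors the /alerts/edit?ruleId= path used by the updated alertmanager template above.

package rulesketch // hypothetical helper, not part of this PR

import (
	"context"

	ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
)

// legacyRuleURL resolves a legacy integer rule ID to its new UUID and builds
// an edit link in the same shape the updated alertmanager template produces.
func legacyRuleURL(ctx context.Context, store ruletypes.RuleStore, legacyID int, baseURL string) (string, error) {
	history, err := store.GetRuleUUID(ctx, legacyID)
	if err != nil {
		return "", err
	}
	return baseURL + "/alerts/edit?ruleId=" + history.RuleUUID.StringValue(), nil
}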
25
pkg/types/ruletypes/schedule.go
Normal file
@ -0,0 +1,25 @@
|
||||
package ruletypes
|
||||
|
||||
import (
|
||||
"database/sql/driver"
|
||||
"encoding/json"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Schedule struct {
|
||||
Timezone string `json:"timezone"`
|
||||
StartTime time.Time `json:"startTime,omitempty"`
|
||||
EndTime time.Time `json:"endTime,omitempty"`
|
||||
Recurrence *Recurrence `json:"recurrence"`
|
||||
}
|
||||
|
||||
func (s *Schedule) Scan(src interface{}) error {
|
||||
if data, ok := src.([]byte); ok {
|
||||
return json.Unmarshal(data, s)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Schedule) Value() (driver.Value, error) {
|
||||
return json.Marshal(s)
|
||||
}
|
@ -1,4 +1,4 @@
|
||||
package rules
|
||||
package ruletypes
|
||||
|
||||
import (
|
||||
"bytes"
|
@ -1,4 +1,4 @@
|
||||
package rules
|
||||
package ruletypes
|
||||
|
||||
import (
|
||||
"context"
|