chore: add aggregation expr rewriter and exhaustive tests for logs filter (#7972)

Srikanth Chekuri 2025-05-20 16:54:34 +05:30 committed by GitHub
parent 7290ab3602
commit 018346ca18
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
23 changed files with 5134 additions and 488 deletions


@ -39,6 +39,7 @@ primary
| functionCall
| fullText
| key
| value
;
/*
@ -189,9 +190,13 @@ BOOL
| [Ff][Aa][Ll][Ss][Ee]
;
// Numbers (integer or float). Adjust as needed for your domain.
fragment SIGN : [+-] ;
// Numbers: optional sign, then digits, optional fractional part,
// optional scientific notation (handy for future use)
NUMBER
: DIGIT+ ( '.' DIGIT+ )?
: SIGN? DIGIT+ ('.' DIGIT*)? ([eE] SIGN? DIGIT+)? // -10.25 42 +3.14 6.02e23
| SIGN? '.' DIGIT+ ([eE] SIGN? DIGIT+)? // -.75 .5 -.5e-3
;
// Double/single-quoted text, capturing full text search strings, values, etc.
@ -201,10 +206,12 @@ QUOTED_TEXT
)
;
// Keys can have letters, digits, underscores, dots, and bracket pairs
// e.g. service.name, service.namespace, db.queries[].query_duration
fragment SEGMENT : [a-zA-Z] [a-zA-Z0-9_:\-]* ;
fragment EMPTY_BRACKS : '[' ']' ;
fragment OLD_JSON_BRACKS: '[' '*' ']';
KEY
: [a-zA-Z0-9_] [a-zA-Z0-9_.*[\]]*
: SEGMENT ( '.' SEGMENT | EMPTY_BRACKS | OLD_JSON_BRACKS)*
;
// Ignore whitespace
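As an illustration of what the revised lexer rules accept, the following standalone Go sketch mirrors NUMBER and KEY with hand-written regular expressions. These patterns are an approximation written for this example, not the generated lexer itself:

package main

import (
	"fmt"
	"regexp"
)

// Approximation of NUMBER: optional sign, digits with an optional fractional part,
// or a leading-dot fraction, each with an optional exponent.
var numberRe = regexp.MustCompile(`^[+-]?(\d+(\.\d*)?|\.\d+)([eE][+-]?\d+)?$`)

// Approximation of KEY: a segment followed by any mix of ".segment", "[]" or "[*]",
// e.g. service.name, db.queries[].query_duration.
var keyRe = regexp.MustCompile(`^[a-zA-Z][a-zA-Z0-9_:\-]*(\.[a-zA-Z][a-zA-Z0-9_:\-]*|\[\]|\[\*\])*$`)

func main() {
	for _, s := range []string{"-10.25", "+3.14", "6.02e23", "-.5e-3", "42"} {
		fmt.Println(s, "matches NUMBER:", numberRe.MatchString(s))
	}
	for _, s := range []string{"service.name", "db.queries[].query_duration", "k8s.pod[*].name"} {
		fmt.Println(s, "matches KEY:", keyRe.MatchString(s))
	}
}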

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@ -57,139 +57,171 @@ func filterquerylexerLexerInit() {
"LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "NOT_LIKE", "ILIKE", "NOT_ILIKE",
"BETWEEN", "EXISTS", "REGEXP", "CONTAINS", "IN", "NOT", "AND", "OR",
"HAS", "HASANY", "HASALL", "BOOL", "NUMBER", "QUOTED_TEXT", "KEY", "WS",
"DIGIT", "FREETEXT",
"HAS", "HASANY", "HASALL", "BOOL", "SIGN", "NUMBER", "QUOTED_TEXT",
"SEGMENT", "EMPTY_BRACKS", "OLD_JSON_BRACKS", "KEY", "WS", "DIGIT",
"FREETEXT",
}
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 0, 33, 270, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
4, 0, 33, 334, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2,
10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15,
7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7,
20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25,
2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2,
31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1,
2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 83, 8, 5, 1, 6, 1, 6,
1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1,
11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13,
1, 13, 4, 13, 110, 8, 13, 11, 13, 12, 13, 111, 1, 13, 1, 13, 1, 13, 1,
13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15,
1, 15, 4, 15, 129, 8, 15, 11, 15, 12, 15, 130, 1, 15, 1, 15, 1, 15, 1,
15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16,
1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 153, 8, 17, 1, 18, 1,
18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19,
1, 19, 1, 19, 1, 19, 3, 19, 170, 8, 19, 1, 20, 1, 20, 1, 20, 1, 21, 1,
21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 24,
1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1,
26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27,
1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 3, 27, 213, 8, 27, 1, 28, 4, 28, 216,
8, 28, 11, 28, 12, 28, 217, 1, 28, 1, 28, 4, 28, 222, 8, 28, 11, 28, 12,
28, 223, 3, 28, 226, 8, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 232, 8,
29, 10, 29, 12, 29, 235, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29,
242, 8, 29, 10, 29, 12, 29, 245, 9, 29, 1, 29, 3, 29, 248, 8, 29, 1, 30,
1, 30, 5, 30, 252, 8, 30, 10, 30, 12, 30, 255, 9, 30, 1, 31, 4, 31, 258,
8, 31, 11, 31, 12, 31, 259, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 4, 33, 267,
8, 33, 11, 33, 12, 33, 268, 0, 0, 34, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11,
6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15,
31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24,
49, 25, 51, 26, 53, 27, 55, 28, 57, 29, 59, 30, 61, 31, 63, 32, 65, 0,
67, 33, 1, 0, 29, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105, 2, 0,
75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 78, 78, 110, 110, 2, 0,
79, 79, 111, 111, 2, 0, 84, 84, 116, 116, 2, 0, 9, 9, 32, 32, 2, 0, 66,
66, 98, 98, 2, 0, 87, 87, 119, 119, 2, 0, 88, 88, 120, 120, 2, 0, 83, 83,
115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0, 80, 80,
112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100,
100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85, 85, 117,
117, 2, 0, 70, 70, 102, 102, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92, 92,
4, 0, 48, 57, 65, 90, 95, 95, 97, 122, 7, 0, 42, 42, 46, 46, 48, 57, 65,
91, 93, 93, 95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57,
8, 0, 9, 10, 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 285,
0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0,
0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0,
0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0,
0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1,
0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39,
1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0,
47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0,
0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0,
0, 0, 63, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 1, 69, 1, 0, 0, 0, 3, 71, 1, 0,
0, 0, 5, 73, 1, 0, 0, 0, 7, 75, 1, 0, 0, 0, 9, 77, 1, 0, 0, 0, 11, 82,
1, 0, 0, 0, 13, 84, 1, 0, 0, 0, 15, 87, 1, 0, 0, 0, 17, 90, 1, 0, 0, 0,
19, 92, 1, 0, 0, 0, 21, 95, 1, 0, 0, 0, 23, 97, 1, 0, 0, 0, 25, 100, 1,
0, 0, 0, 27, 105, 1, 0, 0, 0, 29, 118, 1, 0, 0, 0, 31, 124, 1, 0, 0, 0,
33, 138, 1, 0, 0, 0, 35, 146, 1, 0, 0, 0, 37, 154, 1, 0, 0, 0, 39, 161,
1, 0, 0, 0, 41, 171, 1, 0, 0, 0, 43, 174, 1, 0, 0, 0, 45, 178, 1, 0, 0,
0, 47, 182, 1, 0, 0, 0, 49, 185, 1, 0, 0, 0, 51, 189, 1, 0, 0, 0, 53, 196,
1, 0, 0, 0, 55, 212, 1, 0, 0, 0, 57, 215, 1, 0, 0, 0, 59, 247, 1, 0, 0,
0, 61, 249, 1, 0, 0, 0, 63, 257, 1, 0, 0, 0, 65, 263, 1, 0, 0, 0, 67, 266,
1, 0, 0, 0, 69, 70, 5, 40, 0, 0, 70, 2, 1, 0, 0, 0, 71, 72, 5, 41, 0, 0,
72, 4, 1, 0, 0, 0, 73, 74, 5, 91, 0, 0, 74, 6, 1, 0, 0, 0, 75, 76, 5, 93,
0, 0, 76, 8, 1, 0, 0, 0, 77, 78, 5, 44, 0, 0, 78, 10, 1, 0, 0, 0, 79, 83,
5, 61, 0, 0, 80, 81, 5, 61, 0, 0, 81, 83, 5, 61, 0, 0, 82, 79, 1, 0, 0,
0, 82, 80, 1, 0, 0, 0, 83, 12, 1, 0, 0, 0, 84, 85, 5, 33, 0, 0, 85, 86,
5, 61, 0, 0, 86, 14, 1, 0, 0, 0, 87, 88, 5, 60, 0, 0, 88, 89, 5, 62, 0,
0, 89, 16, 1, 0, 0, 0, 90, 91, 5, 60, 0, 0, 91, 18, 1, 0, 0, 0, 92, 93,
5, 60, 0, 0, 93, 94, 5, 61, 0, 0, 94, 20, 1, 0, 0, 0, 95, 96, 5, 62, 0,
0, 96, 22, 1, 0, 0, 0, 97, 98, 5, 62, 0, 0, 98, 99, 5, 61, 0, 0, 99, 24,
1, 0, 0, 0, 100, 101, 7, 0, 0, 0, 101, 102, 7, 1, 0, 0, 102, 103, 7, 2,
0, 0, 103, 104, 7, 3, 0, 0, 104, 26, 1, 0, 0, 0, 105, 106, 7, 4, 0, 0,
106, 107, 7, 5, 0, 0, 107, 109, 7, 6, 0, 0, 108, 110, 7, 7, 0, 0, 109,
108, 1, 0, 0, 0, 110, 111, 1, 0, 0, 0, 111, 109, 1, 0, 0, 0, 111, 112,
1, 0, 0, 0, 112, 113, 1, 0, 0, 0, 113, 114, 7, 0, 0, 0, 114, 115, 7, 1,
0, 0, 115, 116, 7, 2, 0, 0, 116, 117, 7, 3, 0, 0, 117, 28, 1, 0, 0, 0,
118, 119, 7, 1, 0, 0, 119, 120, 7, 0, 0, 0, 120, 121, 7, 1, 0, 0, 121,
122, 7, 2, 0, 0, 122, 123, 7, 3, 0, 0, 123, 30, 1, 0, 0, 0, 124, 125, 7,
4, 0, 0, 125, 126, 7, 5, 0, 0, 126, 128, 7, 6, 0, 0, 127, 129, 7, 7, 0,
0, 128, 127, 1, 0, 0, 0, 129, 130, 1, 0, 0, 0, 130, 128, 1, 0, 0, 0, 130,
131, 1, 0, 0, 0, 131, 132, 1, 0, 0, 0, 132, 133, 7, 1, 0, 0, 133, 134,
7, 0, 0, 0, 134, 135, 7, 1, 0, 0, 135, 136, 7, 2, 0, 0, 136, 137, 7, 3,
0, 0, 137, 32, 1, 0, 0, 0, 138, 139, 7, 8, 0, 0, 139, 140, 7, 3, 0, 0,
140, 141, 7, 6, 0, 0, 141, 142, 7, 9, 0, 0, 142, 143, 7, 3, 0, 0, 143,
144, 7, 3, 0, 0, 144, 145, 7, 4, 0, 0, 145, 34, 1, 0, 0, 0, 146, 147, 7,
3, 0, 0, 147, 148, 7, 10, 0, 0, 148, 149, 7, 1, 0, 0, 149, 150, 7, 11,
0, 0, 150, 152, 7, 6, 0, 0, 151, 153, 7, 11, 0, 0, 152, 151, 1, 0, 0, 0,
152, 153, 1, 0, 0, 0, 153, 36, 1, 0, 0, 0, 154, 155, 7, 12, 0, 0, 155,
156, 7, 3, 0, 0, 156, 157, 7, 13, 0, 0, 157, 158, 7, 3, 0, 0, 158, 159,
7, 10, 0, 0, 159, 160, 7, 14, 0, 0, 160, 38, 1, 0, 0, 0, 161, 162, 7, 15,
0, 0, 162, 163, 7, 5, 0, 0, 163, 164, 7, 4, 0, 0, 164, 165, 7, 6, 0, 0,
165, 166, 7, 16, 0, 0, 166, 167, 7, 1, 0, 0, 167, 169, 7, 4, 0, 0, 168,
170, 7, 11, 0, 0, 169, 168, 1, 0, 0, 0, 169, 170, 1, 0, 0, 0, 170, 40,
1, 0, 0, 0, 171, 172, 7, 1, 0, 0, 172, 173, 7, 4, 0, 0, 173, 42, 1, 0,
0, 0, 174, 175, 7, 4, 0, 0, 175, 176, 7, 5, 0, 0, 176, 177, 7, 6, 0, 0,
177, 44, 1, 0, 0, 0, 178, 179, 7, 16, 0, 0, 179, 180, 7, 4, 0, 0, 180,
181, 7, 17, 0, 0, 181, 46, 1, 0, 0, 0, 182, 183, 7, 5, 0, 0, 183, 184,
7, 12, 0, 0, 184, 48, 1, 0, 0, 0, 185, 186, 7, 18, 0, 0, 186, 187, 7, 16,
0, 0, 187, 188, 7, 11, 0, 0, 188, 50, 1, 0, 0, 0, 189, 190, 7, 18, 0, 0,
190, 191, 7, 16, 0, 0, 191, 192, 7, 11, 0, 0, 192, 193, 7, 16, 0, 0, 193,
194, 7, 4, 0, 0, 194, 195, 7, 19, 0, 0, 195, 52, 1, 0, 0, 0, 196, 197,
7, 18, 0, 0, 197, 198, 7, 16, 0, 0, 198, 199, 7, 11, 0, 0, 199, 200, 7,
16, 0, 0, 200, 201, 7, 0, 0, 0, 201, 202, 7, 0, 0, 0, 202, 54, 1, 0, 0,
0, 203, 204, 7, 6, 0, 0, 204, 205, 7, 12, 0, 0, 205, 206, 7, 20, 0, 0,
206, 213, 7, 3, 0, 0, 207, 208, 7, 21, 0, 0, 208, 209, 7, 16, 0, 0, 209,
210, 7, 0, 0, 0, 210, 211, 7, 11, 0, 0, 211, 213, 7, 3, 0, 0, 212, 203,
1, 0, 0, 0, 212, 207, 1, 0, 0, 0, 213, 56, 1, 0, 0, 0, 214, 216, 3, 65,
32, 0, 215, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 215, 1, 0, 0, 0,
217, 218, 1, 0, 0, 0, 218, 225, 1, 0, 0, 0, 219, 221, 5, 46, 0, 0, 220,
222, 3, 65, 32, 0, 221, 220, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 221,
1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 226, 1, 0, 0, 0, 225, 219, 1, 0,
0, 0, 225, 226, 1, 0, 0, 0, 226, 58, 1, 0, 0, 0, 227, 233, 5, 34, 0, 0,
228, 232, 8, 22, 0, 0, 229, 230, 5, 92, 0, 0, 230, 232, 9, 0, 0, 0, 231,
228, 1, 0, 0, 0, 231, 229, 1, 0, 0, 0, 232, 235, 1, 0, 0, 0, 233, 231,
1, 0, 0, 0, 233, 234, 1, 0, 0, 0, 234, 236, 1, 0, 0, 0, 235, 233, 1, 0,
0, 0, 236, 248, 5, 34, 0, 0, 237, 243, 5, 39, 0, 0, 238, 242, 8, 23, 0,
0, 239, 240, 5, 92, 0, 0, 240, 242, 9, 0, 0, 0, 241, 238, 1, 0, 0, 0, 241,
239, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243, 244,
1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 248, 5, 39,
0, 0, 247, 227, 1, 0, 0, 0, 247, 237, 1, 0, 0, 0, 248, 60, 1, 0, 0, 0,
249, 253, 7, 24, 0, 0, 250, 252, 7, 25, 0, 0, 251, 250, 1, 0, 0, 0, 252,
255, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 253, 254, 1, 0, 0, 0, 254, 62, 1,
0, 0, 0, 255, 253, 1, 0, 0, 0, 256, 258, 7, 26, 0, 0, 257, 256, 1, 0, 0,
0, 258, 259, 1, 0, 0, 0, 259, 257, 1, 0, 0, 0, 259, 260, 1, 0, 0, 0, 260,
261, 1, 0, 0, 0, 261, 262, 6, 31, 0, 0, 262, 64, 1, 0, 0, 0, 263, 264,
7, 27, 0, 0, 264, 66, 1, 0, 0, 0, 265, 267, 8, 28, 0, 0, 266, 265, 1, 0,
0, 0, 267, 268, 1, 0, 0, 0, 268, 266, 1, 0, 0, 0, 268, 269, 1, 0, 0, 0,
269, 68, 1, 0, 0, 0, 18, 0, 82, 111, 130, 152, 169, 212, 217, 223, 225,
231, 233, 241, 243, 247, 253, 259, 268, 1, 6, 0, 0,
31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36,
7, 36, 2, 37, 7, 37, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1,
4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 91, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7,
1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11,
1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 4, 13, 118,
8, 13, 11, 13, 12, 13, 119, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1,
14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 4, 15, 137,
8, 15, 11, 15, 12, 15, 138, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1,
16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17,
1, 17, 1, 17, 1, 17, 3, 17, 161, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1,
18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19,
3, 19, 178, 8, 19, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1,
22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24,
1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1,
26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27,
1, 27, 1, 27, 3, 27, 221, 8, 27, 1, 28, 1, 28, 1, 29, 3, 29, 226, 8, 29,
1, 29, 4, 29, 229, 8, 29, 11, 29, 12, 29, 230, 1, 29, 1, 29, 5, 29, 235,
8, 29, 10, 29, 12, 29, 238, 9, 29, 3, 29, 240, 8, 29, 1, 29, 1, 29, 3,
29, 244, 8, 29, 1, 29, 4, 29, 247, 8, 29, 11, 29, 12, 29, 248, 3, 29, 251,
8, 29, 1, 29, 3, 29, 254, 8, 29, 1, 29, 1, 29, 4, 29, 258, 8, 29, 11, 29,
12, 29, 259, 1, 29, 1, 29, 3, 29, 264, 8, 29, 1, 29, 4, 29, 267, 8, 29,
11, 29, 12, 29, 268, 3, 29, 271, 8, 29, 3, 29, 273, 8, 29, 1, 30, 1, 30,
1, 30, 1, 30, 5, 30, 279, 8, 30, 10, 30, 12, 30, 282, 9, 30, 1, 30, 1,
30, 1, 30, 1, 30, 1, 30, 5, 30, 289, 8, 30, 10, 30, 12, 30, 292, 9, 30,
1, 30, 3, 30, 295, 8, 30, 1, 31, 1, 31, 5, 31, 299, 8, 31, 10, 31, 12,
31, 302, 9, 31, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 34,
1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 316, 8, 34, 10, 34, 12, 34, 319, 9,
34, 1, 35, 4, 35, 322, 8, 35, 11, 35, 12, 35, 323, 1, 35, 1, 35, 1, 36,
1, 36, 1, 37, 4, 37, 331, 8, 37, 11, 37, 12, 37, 332, 0, 0, 38, 1, 1, 3,
2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12,
25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21,
43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 28, 57, 0, 59, 29,
61, 30, 63, 0, 65, 0, 67, 0, 69, 31, 71, 32, 73, 0, 75, 33, 1, 0, 30, 2,
0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105, 2, 0, 75, 75, 107, 107, 2,
0, 69, 69, 101, 101, 2, 0, 78, 78, 110, 110, 2, 0, 79, 79, 111, 111, 2,
0, 84, 84, 116, 116, 2, 0, 9, 9, 32, 32, 2, 0, 66, 66, 98, 98, 2, 0, 87,
87, 119, 119, 2, 0, 88, 88, 120, 120, 2, 0, 83, 83, 115, 115, 2, 0, 82,
82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0, 80, 80, 112, 112, 2, 0, 67,
67, 99, 99, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 72, 72,
104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85, 85, 117, 117, 2, 0, 70, 70,
102, 102, 2, 0, 43, 43, 45, 45, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92,
92, 2, 0, 65, 90, 97, 122, 5, 0, 45, 45, 48, 58, 65, 90, 95, 95, 97, 122,
3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 8, 0, 9, 10, 13, 13, 32, 34,
39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 358, 0, 1, 1, 0, 0, 0, 0, 3, 1,
0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1,
0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19,
1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0,
27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0,
0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0,
0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0,
0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 59, 1,
0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, 0, 0, 0, 0, 75,
1, 0, 0, 0, 1, 77, 1, 0, 0, 0, 3, 79, 1, 0, 0, 0, 5, 81, 1, 0, 0, 0, 7,
83, 1, 0, 0, 0, 9, 85, 1, 0, 0, 0, 11, 90, 1, 0, 0, 0, 13, 92, 1, 0, 0,
0, 15, 95, 1, 0, 0, 0, 17, 98, 1, 0, 0, 0, 19, 100, 1, 0, 0, 0, 21, 103,
1, 0, 0, 0, 23, 105, 1, 0, 0, 0, 25, 108, 1, 0, 0, 0, 27, 113, 1, 0, 0,
0, 29, 126, 1, 0, 0, 0, 31, 132, 1, 0, 0, 0, 33, 146, 1, 0, 0, 0, 35, 154,
1, 0, 0, 0, 37, 162, 1, 0, 0, 0, 39, 169, 1, 0, 0, 0, 41, 179, 1, 0, 0,
0, 43, 182, 1, 0, 0, 0, 45, 186, 1, 0, 0, 0, 47, 190, 1, 0, 0, 0, 49, 193,
1, 0, 0, 0, 51, 197, 1, 0, 0, 0, 53, 204, 1, 0, 0, 0, 55, 220, 1, 0, 0,
0, 57, 222, 1, 0, 0, 0, 59, 272, 1, 0, 0, 0, 61, 294, 1, 0, 0, 0, 63, 296,
1, 0, 0, 0, 65, 303, 1, 0, 0, 0, 67, 306, 1, 0, 0, 0, 69, 310, 1, 0, 0,
0, 71, 321, 1, 0, 0, 0, 73, 327, 1, 0, 0, 0, 75, 330, 1, 0, 0, 0, 77, 78,
5, 40, 0, 0, 78, 2, 1, 0, 0, 0, 79, 80, 5, 41, 0, 0, 80, 4, 1, 0, 0, 0,
81, 82, 5, 91, 0, 0, 82, 6, 1, 0, 0, 0, 83, 84, 5, 93, 0, 0, 84, 8, 1,
0, 0, 0, 85, 86, 5, 44, 0, 0, 86, 10, 1, 0, 0, 0, 87, 91, 5, 61, 0, 0,
88, 89, 5, 61, 0, 0, 89, 91, 5, 61, 0, 0, 90, 87, 1, 0, 0, 0, 90, 88, 1,
0, 0, 0, 91, 12, 1, 0, 0, 0, 92, 93, 5, 33, 0, 0, 93, 94, 5, 61, 0, 0,
94, 14, 1, 0, 0, 0, 95, 96, 5, 60, 0, 0, 96, 97, 5, 62, 0, 0, 97, 16, 1,
0, 0, 0, 98, 99, 5, 60, 0, 0, 99, 18, 1, 0, 0, 0, 100, 101, 5, 60, 0, 0,
101, 102, 5, 61, 0, 0, 102, 20, 1, 0, 0, 0, 103, 104, 5, 62, 0, 0, 104,
22, 1, 0, 0, 0, 105, 106, 5, 62, 0, 0, 106, 107, 5, 61, 0, 0, 107, 24,
1, 0, 0, 0, 108, 109, 7, 0, 0, 0, 109, 110, 7, 1, 0, 0, 110, 111, 7, 2,
0, 0, 111, 112, 7, 3, 0, 0, 112, 26, 1, 0, 0, 0, 113, 114, 7, 4, 0, 0,
114, 115, 7, 5, 0, 0, 115, 117, 7, 6, 0, 0, 116, 118, 7, 7, 0, 0, 117,
116, 1, 0, 0, 0, 118, 119, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 119, 120,
1, 0, 0, 0, 120, 121, 1, 0, 0, 0, 121, 122, 7, 0, 0, 0, 122, 123, 7, 1,
0, 0, 123, 124, 7, 2, 0, 0, 124, 125, 7, 3, 0, 0, 125, 28, 1, 0, 0, 0,
126, 127, 7, 1, 0, 0, 127, 128, 7, 0, 0, 0, 128, 129, 7, 1, 0, 0, 129,
130, 7, 2, 0, 0, 130, 131, 7, 3, 0, 0, 131, 30, 1, 0, 0, 0, 132, 133, 7,
4, 0, 0, 133, 134, 7, 5, 0, 0, 134, 136, 7, 6, 0, 0, 135, 137, 7, 7, 0,
0, 136, 135, 1, 0, 0, 0, 137, 138, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0, 138,
139, 1, 0, 0, 0, 139, 140, 1, 0, 0, 0, 140, 141, 7, 1, 0, 0, 141, 142,
7, 0, 0, 0, 142, 143, 7, 1, 0, 0, 143, 144, 7, 2, 0, 0, 144, 145, 7, 3,
0, 0, 145, 32, 1, 0, 0, 0, 146, 147, 7, 8, 0, 0, 147, 148, 7, 3, 0, 0,
148, 149, 7, 6, 0, 0, 149, 150, 7, 9, 0, 0, 150, 151, 7, 3, 0, 0, 151,
152, 7, 3, 0, 0, 152, 153, 7, 4, 0, 0, 153, 34, 1, 0, 0, 0, 154, 155, 7,
3, 0, 0, 155, 156, 7, 10, 0, 0, 156, 157, 7, 1, 0, 0, 157, 158, 7, 11,
0, 0, 158, 160, 7, 6, 0, 0, 159, 161, 7, 11, 0, 0, 160, 159, 1, 0, 0, 0,
160, 161, 1, 0, 0, 0, 161, 36, 1, 0, 0, 0, 162, 163, 7, 12, 0, 0, 163,
164, 7, 3, 0, 0, 164, 165, 7, 13, 0, 0, 165, 166, 7, 3, 0, 0, 166, 167,
7, 10, 0, 0, 167, 168, 7, 14, 0, 0, 168, 38, 1, 0, 0, 0, 169, 170, 7, 15,
0, 0, 170, 171, 7, 5, 0, 0, 171, 172, 7, 4, 0, 0, 172, 173, 7, 6, 0, 0,
173, 174, 7, 16, 0, 0, 174, 175, 7, 1, 0, 0, 175, 177, 7, 4, 0, 0, 176,
178, 7, 11, 0, 0, 177, 176, 1, 0, 0, 0, 177, 178, 1, 0, 0, 0, 178, 40,
1, 0, 0, 0, 179, 180, 7, 1, 0, 0, 180, 181, 7, 4, 0, 0, 181, 42, 1, 0,
0, 0, 182, 183, 7, 4, 0, 0, 183, 184, 7, 5, 0, 0, 184, 185, 7, 6, 0, 0,
185, 44, 1, 0, 0, 0, 186, 187, 7, 16, 0, 0, 187, 188, 7, 4, 0, 0, 188,
189, 7, 17, 0, 0, 189, 46, 1, 0, 0, 0, 190, 191, 7, 5, 0, 0, 191, 192,
7, 12, 0, 0, 192, 48, 1, 0, 0, 0, 193, 194, 7, 18, 0, 0, 194, 195, 7, 16,
0, 0, 195, 196, 7, 11, 0, 0, 196, 50, 1, 0, 0, 0, 197, 198, 7, 18, 0, 0,
198, 199, 7, 16, 0, 0, 199, 200, 7, 11, 0, 0, 200, 201, 7, 16, 0, 0, 201,
202, 7, 4, 0, 0, 202, 203, 7, 19, 0, 0, 203, 52, 1, 0, 0, 0, 204, 205,
7, 18, 0, 0, 205, 206, 7, 16, 0, 0, 206, 207, 7, 11, 0, 0, 207, 208, 7,
16, 0, 0, 208, 209, 7, 0, 0, 0, 209, 210, 7, 0, 0, 0, 210, 54, 1, 0, 0,
0, 211, 212, 7, 6, 0, 0, 212, 213, 7, 12, 0, 0, 213, 214, 7, 20, 0, 0,
214, 221, 7, 3, 0, 0, 215, 216, 7, 21, 0, 0, 216, 217, 7, 16, 0, 0, 217,
218, 7, 0, 0, 0, 218, 219, 7, 11, 0, 0, 219, 221, 7, 3, 0, 0, 220, 211,
1, 0, 0, 0, 220, 215, 1, 0, 0, 0, 221, 56, 1, 0, 0, 0, 222, 223, 7, 22,
0, 0, 223, 58, 1, 0, 0, 0, 224, 226, 3, 57, 28, 0, 225, 224, 1, 0, 0, 0,
225, 226, 1, 0, 0, 0, 226, 228, 1, 0, 0, 0, 227, 229, 3, 73, 36, 0, 228,
227, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 228, 1, 0, 0, 0, 230, 231,
1, 0, 0, 0, 231, 239, 1, 0, 0, 0, 232, 236, 5, 46, 0, 0, 233, 235, 3, 73,
36, 0, 234, 233, 1, 0, 0, 0, 235, 238, 1, 0, 0, 0, 236, 234, 1, 0, 0, 0,
236, 237, 1, 0, 0, 0, 237, 240, 1, 0, 0, 0, 238, 236, 1, 0, 0, 0, 239,
232, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 250, 1, 0, 0, 0, 241, 243,
7, 3, 0, 0, 242, 244, 3, 57, 28, 0, 243, 242, 1, 0, 0, 0, 243, 244, 1,
0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 247, 3, 73, 36, 0, 246, 245, 1, 0,
0, 0, 247, 248, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 248, 249, 1, 0, 0, 0,
249, 251, 1, 0, 0, 0, 250, 241, 1, 0, 0, 0, 250, 251, 1, 0, 0, 0, 251,
273, 1, 0, 0, 0, 252, 254, 3, 57, 28, 0, 253, 252, 1, 0, 0, 0, 253, 254,
1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 257, 5, 46, 0, 0, 256, 258, 3, 73,
36, 0, 257, 256, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 257, 1, 0, 0, 0,
259, 260, 1, 0, 0, 0, 260, 270, 1, 0, 0, 0, 261, 263, 7, 3, 0, 0, 262,
264, 3, 57, 28, 0, 263, 262, 1, 0, 0, 0, 263, 264, 1, 0, 0, 0, 264, 266,
1, 0, 0, 0, 265, 267, 3, 73, 36, 0, 266, 265, 1, 0, 0, 0, 267, 268, 1,
0, 0, 0, 268, 266, 1, 0, 0, 0, 268, 269, 1, 0, 0, 0, 269, 271, 1, 0, 0,
0, 270, 261, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, 273, 1, 0, 0, 0, 272,
225, 1, 0, 0, 0, 272, 253, 1, 0, 0, 0, 273, 60, 1, 0, 0, 0, 274, 280, 5,
34, 0, 0, 275, 279, 8, 23, 0, 0, 276, 277, 5, 92, 0, 0, 277, 279, 9, 0,
0, 0, 278, 275, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0,
280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 283, 1, 0, 0, 0, 282,
280, 1, 0, 0, 0, 283, 295, 5, 34, 0, 0, 284, 290, 5, 39, 0, 0, 285, 289,
8, 24, 0, 0, 286, 287, 5, 92, 0, 0, 287, 289, 9, 0, 0, 0, 288, 285, 1,
0, 0, 0, 288, 286, 1, 0, 0, 0, 289, 292, 1, 0, 0, 0, 290, 288, 1, 0, 0,
0, 290, 291, 1, 0, 0, 0, 291, 293, 1, 0, 0, 0, 292, 290, 1, 0, 0, 0, 293,
295, 5, 39, 0, 0, 294, 274, 1, 0, 0, 0, 294, 284, 1, 0, 0, 0, 295, 62,
1, 0, 0, 0, 296, 300, 7, 25, 0, 0, 297, 299, 7, 26, 0, 0, 298, 297, 1,
0, 0, 0, 299, 302, 1, 0, 0, 0, 300, 298, 1, 0, 0, 0, 300, 301, 1, 0, 0,
0, 301, 64, 1, 0, 0, 0, 302, 300, 1, 0, 0, 0, 303, 304, 5, 91, 0, 0, 304,
305, 5, 93, 0, 0, 305, 66, 1, 0, 0, 0, 306, 307, 5, 91, 0, 0, 307, 308,
5, 42, 0, 0, 308, 309, 5, 93, 0, 0, 309, 68, 1, 0, 0, 0, 310, 317, 3, 63,
31, 0, 311, 312, 5, 46, 0, 0, 312, 316, 3, 63, 31, 0, 313, 316, 3, 65,
32, 0, 314, 316, 3, 67, 33, 0, 315, 311, 1, 0, 0, 0, 315, 313, 1, 0, 0,
0, 315, 314, 1, 0, 0, 0, 316, 319, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 317,
318, 1, 0, 0, 0, 318, 70, 1, 0, 0, 0, 319, 317, 1, 0, 0, 0, 320, 322, 7,
27, 0, 0, 321, 320, 1, 0, 0, 0, 322, 323, 1, 0, 0, 0, 323, 321, 1, 0, 0,
0, 323, 324, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 326, 6, 35, 0, 0, 326,
72, 1, 0, 0, 0, 327, 328, 7, 28, 0, 0, 328, 74, 1, 0, 0, 0, 329, 331, 8,
29, 0, 0, 330, 329, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 330, 1, 0, 0,
0, 332, 333, 1, 0, 0, 0, 333, 76, 1, 0, 0, 0, 30, 0, 90, 119, 138, 160,
177, 220, 225, 230, 236, 239, 243, 248, 250, 253, 259, 263, 268, 270, 272,
278, 280, 288, 290, 294, 300, 315, 317, 323, 332, 1, 6, 0, 0,
}
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)


@ -51,97 +51,97 @@ func filterqueryParserInit() {
}
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 1, 33, 212, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7,
4, 1, 33, 213, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7,
4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7,
10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15,
2, 16, 7, 16, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 5, 2, 43,
8, 2, 10, 2, 12, 2, 46, 9, 2, 1, 3, 1, 3, 1, 3, 1, 3, 5, 3, 52, 8, 3, 10,
3, 12, 3, 55, 9, 3, 1, 4, 3, 4, 58, 8, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5,
1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 70, 8, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1,
1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 71, 8, 5, 1, 6, 1, 6, 1, 6, 1,
6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1,
6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1,
6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1,
6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1,
6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1,
6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3,
6, 148, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1,
7, 3, 7, 160, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1,
8, 1, 8, 1, 8, 1, 8, 3, 8, 174, 8, 8, 1, 9, 1, 9, 1, 9, 5, 9, 179, 8, 9,
10, 9, 12, 9, 182, 9, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11,
1, 12, 1, 12, 1, 12, 5, 12, 194, 8, 12, 10, 12, 12, 12, 197, 9, 12, 1,
13, 1, 13, 1, 13, 3, 13, 202, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15,
1, 15, 1, 16, 1, 16, 1, 16, 0, 0, 17, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18,
20, 22, 24, 26, 28, 30, 32, 0, 6, 1, 0, 7, 8, 2, 0, 13, 13, 15, 15, 2,
0, 14, 14, 16, 16, 2, 0, 30, 30, 33, 33, 1, 0, 25, 27, 1, 0, 28, 31, 225,
0, 34, 1, 0, 0, 0, 2, 37, 1, 0, 0, 0, 4, 39, 1, 0, 0, 0, 6, 47, 1, 0, 0,
0, 8, 57, 1, 0, 0, 0, 10, 69, 1, 0, 0, 0, 12, 147, 1, 0, 0, 0, 14, 159,
1, 0, 0, 0, 16, 173, 1, 0, 0, 0, 18, 175, 1, 0, 0, 0, 20, 183, 1, 0, 0,
0, 22, 185, 1, 0, 0, 0, 24, 190, 1, 0, 0, 0, 26, 201, 1, 0, 0, 0, 28, 203,
1, 0, 0, 0, 30, 207, 1, 0, 0, 0, 32, 209, 1, 0, 0, 0, 34, 35, 3, 2, 1,
0, 35, 36, 5, 0, 0, 1, 36, 1, 1, 0, 0, 0, 37, 38, 3, 4, 2, 0, 38, 3, 1,
0, 0, 0, 39, 44, 3, 6, 3, 0, 40, 41, 5, 24, 0, 0, 41, 43, 3, 6, 3, 0, 42,
40, 1, 0, 0, 0, 43, 46, 1, 0, 0, 0, 44, 42, 1, 0, 0, 0, 44, 45, 1, 0, 0,
0, 45, 5, 1, 0, 0, 0, 46, 44, 1, 0, 0, 0, 47, 53, 3, 8, 4, 0, 48, 49, 5,
23, 0, 0, 49, 52, 3, 8, 4, 0, 50, 52, 3, 8, 4, 0, 51, 48, 1, 0, 0, 0, 51,
50, 1, 0, 0, 0, 52, 55, 1, 0, 0, 0, 53, 51, 1, 0, 0, 0, 53, 54, 1, 0, 0,
0, 54, 7, 1, 0, 0, 0, 55, 53, 1, 0, 0, 0, 56, 58, 5, 22, 0, 0, 57, 56,
1, 0, 0, 0, 57, 58, 1, 0, 0, 0, 58, 59, 1, 0, 0, 0, 59, 60, 3, 10, 5, 0,
60, 9, 1, 0, 0, 0, 61, 62, 5, 1, 0, 0, 62, 63, 3, 4, 2, 0, 63, 64, 5, 2,
0, 0, 64, 70, 1, 0, 0, 0, 65, 70, 3, 12, 6, 0, 66, 70, 3, 22, 11, 0, 67,
70, 3, 20, 10, 0, 68, 70, 3, 32, 16, 0, 69, 61, 1, 0, 0, 0, 69, 65, 1,
0, 0, 0, 69, 66, 1, 0, 0, 0, 69, 67, 1, 0, 0, 0, 69, 68, 1, 0, 0, 0, 70,
11, 1, 0, 0, 0, 71, 72, 3, 32, 16, 0, 72, 73, 5, 6, 0, 0, 73, 74, 3, 30,
15, 0, 74, 148, 1, 0, 0, 0, 75, 76, 3, 32, 16, 0, 76, 77, 7, 0, 0, 0, 77,
78, 3, 30, 15, 0, 78, 148, 1, 0, 0, 0, 79, 80, 3, 32, 16, 0, 80, 81, 5,
9, 0, 0, 81, 82, 3, 30, 15, 0, 82, 148, 1, 0, 0, 0, 83, 84, 3, 32, 16,
0, 84, 85, 5, 10, 0, 0, 85, 86, 3, 30, 15, 0, 86, 148, 1, 0, 0, 0, 87,
88, 3, 32, 16, 0, 88, 89, 5, 11, 0, 0, 89, 90, 3, 30, 15, 0, 90, 148, 1,
0, 0, 0, 91, 92, 3, 32, 16, 0, 92, 93, 5, 12, 0, 0, 93, 94, 3, 30, 15,
0, 94, 148, 1, 0, 0, 0, 95, 96, 3, 32, 16, 0, 96, 97, 7, 1, 0, 0, 97, 98,
3, 30, 15, 0, 98, 148, 1, 0, 0, 0, 99, 100, 3, 32, 16, 0, 100, 101, 7,
2, 0, 0, 101, 102, 3, 30, 15, 0, 102, 148, 1, 0, 0, 0, 103, 104, 3, 32,
16, 0, 104, 105, 5, 17, 0, 0, 105, 106, 3, 30, 15, 0, 106, 107, 5, 23,
0, 0, 107, 108, 3, 30, 15, 0, 108, 148, 1, 0, 0, 0, 109, 110, 3, 32, 16,
0, 110, 111, 5, 22, 0, 0, 111, 112, 5, 17, 0, 0, 112, 113, 3, 30, 15, 0,
113, 114, 5, 23, 0, 0, 114, 115, 3, 30, 15, 0, 115, 148, 1, 0, 0, 0, 116,
117, 3, 32, 16, 0, 117, 118, 3, 14, 7, 0, 118, 148, 1, 0, 0, 0, 119, 120,
3, 32, 16, 0, 120, 121, 3, 16, 8, 0, 121, 148, 1, 0, 0, 0, 122, 123, 3,
32, 16, 0, 123, 124, 5, 18, 0, 0, 124, 148, 1, 0, 0, 0, 125, 126, 3, 32,
16, 0, 126, 127, 5, 22, 0, 0, 127, 128, 5, 18, 0, 0, 128, 148, 1, 0, 0,
0, 129, 130, 3, 32, 16, 0, 130, 131, 5, 19, 0, 0, 131, 132, 3, 30, 15,
0, 132, 148, 1, 0, 0, 0, 133, 134, 3, 32, 16, 0, 134, 135, 5, 22, 0, 0,
135, 136, 5, 19, 0, 0, 136, 137, 3, 30, 15, 0, 137, 148, 1, 0, 0, 0, 138,
139, 3, 32, 16, 0, 139, 140, 5, 20, 0, 0, 140, 141, 3, 30, 15, 0, 141,
148, 1, 0, 0, 0, 142, 143, 3, 32, 16, 0, 143, 144, 5, 22, 0, 0, 144, 145,
5, 20, 0, 0, 145, 146, 3, 30, 15, 0, 146, 148, 1, 0, 0, 0, 147, 71, 1,
0, 0, 0, 147, 75, 1, 0, 0, 0, 147, 79, 1, 0, 0, 0, 147, 83, 1, 0, 0, 0,
147, 87, 1, 0, 0, 0, 147, 91, 1, 0, 0, 0, 147, 95, 1, 0, 0, 0, 147, 99,
1, 0, 0, 0, 147, 103, 1, 0, 0, 0, 147, 109, 1, 0, 0, 0, 147, 116, 1, 0,
0, 0, 147, 119, 1, 0, 0, 0, 147, 122, 1, 0, 0, 0, 147, 125, 1, 0, 0, 0,
147, 129, 1, 0, 0, 0, 147, 133, 1, 0, 0, 0, 147, 138, 1, 0, 0, 0, 147,
142, 1, 0, 0, 0, 148, 13, 1, 0, 0, 0, 149, 150, 5, 21, 0, 0, 150, 151,
5, 1, 0, 0, 151, 152, 3, 18, 9, 0, 152, 153, 5, 2, 0, 0, 153, 160, 1, 0,
0, 0, 154, 155, 5, 21, 0, 0, 155, 156, 5, 3, 0, 0, 156, 157, 3, 18, 9,
0, 157, 158, 5, 4, 0, 0, 158, 160, 1, 0, 0, 0, 159, 149, 1, 0, 0, 0, 159,
154, 1, 0, 0, 0, 160, 15, 1, 0, 0, 0, 161, 162, 5, 22, 0, 0, 162, 163,
5, 21, 0, 0, 163, 164, 5, 1, 0, 0, 164, 165, 3, 18, 9, 0, 165, 166, 5,
2, 0, 0, 166, 174, 1, 0, 0, 0, 167, 168, 5, 22, 0, 0, 168, 169, 5, 21,
0, 0, 169, 170, 5, 3, 0, 0, 170, 171, 3, 18, 9, 0, 171, 172, 5, 4, 0, 0,
172, 174, 1, 0, 0, 0, 173, 161, 1, 0, 0, 0, 173, 167, 1, 0, 0, 0, 174,
17, 1, 0, 0, 0, 175, 180, 3, 30, 15, 0, 176, 177, 5, 5, 0, 0, 177, 179,
3, 30, 15, 0, 178, 176, 1, 0, 0, 0, 179, 182, 1, 0, 0, 0, 180, 178, 1,
0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 19, 1, 0, 0, 0, 182, 180, 1, 0, 0,
0, 183, 184, 7, 3, 0, 0, 184, 21, 1, 0, 0, 0, 185, 186, 7, 4, 0, 0, 186,
187, 5, 1, 0, 0, 187, 188, 3, 24, 12, 0, 188, 189, 5, 2, 0, 0, 189, 23,
1, 0, 0, 0, 190, 195, 3, 26, 13, 0, 191, 192, 5, 5, 0, 0, 192, 194, 3,
26, 13, 0, 193, 191, 1, 0, 0, 0, 194, 197, 1, 0, 0, 0, 195, 193, 1, 0,
0, 0, 195, 196, 1, 0, 0, 0, 196, 25, 1, 0, 0, 0, 197, 195, 1, 0, 0, 0,
198, 202, 3, 32, 16, 0, 199, 202, 3, 30, 15, 0, 200, 202, 3, 28, 14, 0,
201, 198, 1, 0, 0, 0, 201, 199, 1, 0, 0, 0, 201, 200, 1, 0, 0, 0, 202,
27, 1, 0, 0, 0, 203, 204, 5, 3, 0, 0, 204, 205, 3, 18, 9, 0, 205, 206,
5, 4, 0, 0, 206, 29, 1, 0, 0, 0, 207, 208, 7, 5, 0, 0, 208, 31, 1, 0, 0,
0, 209, 210, 5, 31, 0, 0, 210, 33, 1, 0, 0, 0, 11, 44, 51, 53, 57, 69,
147, 159, 173, 180, 195, 201,
6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1,
6, 3, 6, 149, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1,
7, 1, 7, 3, 7, 161, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1,
8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 175, 8, 8, 1, 9, 1, 9, 1, 9, 5, 9, 180,
8, 9, 10, 9, 12, 9, 183, 9, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11,
1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 195, 8, 12, 10, 12, 12, 12, 198, 9,
12, 1, 13, 1, 13, 1, 13, 3, 13, 203, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14,
1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 0, 0, 17, 0, 2, 4, 6, 8, 10, 12, 14,
16, 18, 20, 22, 24, 26, 28, 30, 32, 0, 6, 1, 0, 7, 8, 2, 0, 13, 13, 15,
15, 2, 0, 14, 14, 16, 16, 2, 0, 30, 30, 33, 33, 1, 0, 25, 27, 1, 0, 28,
31, 227, 0, 34, 1, 0, 0, 0, 2, 37, 1, 0, 0, 0, 4, 39, 1, 0, 0, 0, 6, 47,
1, 0, 0, 0, 8, 57, 1, 0, 0, 0, 10, 70, 1, 0, 0, 0, 12, 148, 1, 0, 0, 0,
14, 160, 1, 0, 0, 0, 16, 174, 1, 0, 0, 0, 18, 176, 1, 0, 0, 0, 20, 184,
1, 0, 0, 0, 22, 186, 1, 0, 0, 0, 24, 191, 1, 0, 0, 0, 26, 202, 1, 0, 0,
0, 28, 204, 1, 0, 0, 0, 30, 208, 1, 0, 0, 0, 32, 210, 1, 0, 0, 0, 34, 35,
3, 2, 1, 0, 35, 36, 5, 0, 0, 1, 36, 1, 1, 0, 0, 0, 37, 38, 3, 4, 2, 0,
38, 3, 1, 0, 0, 0, 39, 44, 3, 6, 3, 0, 40, 41, 5, 24, 0, 0, 41, 43, 3,
6, 3, 0, 42, 40, 1, 0, 0, 0, 43, 46, 1, 0, 0, 0, 44, 42, 1, 0, 0, 0, 44,
45, 1, 0, 0, 0, 45, 5, 1, 0, 0, 0, 46, 44, 1, 0, 0, 0, 47, 53, 3, 8, 4,
0, 48, 49, 5, 23, 0, 0, 49, 52, 3, 8, 4, 0, 50, 52, 3, 8, 4, 0, 51, 48,
1, 0, 0, 0, 51, 50, 1, 0, 0, 0, 52, 55, 1, 0, 0, 0, 53, 51, 1, 0, 0, 0,
53, 54, 1, 0, 0, 0, 54, 7, 1, 0, 0, 0, 55, 53, 1, 0, 0, 0, 56, 58, 5, 22,
0, 0, 57, 56, 1, 0, 0, 0, 57, 58, 1, 0, 0, 0, 58, 59, 1, 0, 0, 0, 59, 60,
3, 10, 5, 0, 60, 9, 1, 0, 0, 0, 61, 62, 5, 1, 0, 0, 62, 63, 3, 4, 2, 0,
63, 64, 5, 2, 0, 0, 64, 71, 1, 0, 0, 0, 65, 71, 3, 12, 6, 0, 66, 71, 3,
22, 11, 0, 67, 71, 3, 20, 10, 0, 68, 71, 3, 32, 16, 0, 69, 71, 3, 30, 15,
0, 70, 61, 1, 0, 0, 0, 70, 65, 1, 0, 0, 0, 70, 66, 1, 0, 0, 0, 70, 67,
1, 0, 0, 0, 70, 68, 1, 0, 0, 0, 70, 69, 1, 0, 0, 0, 71, 11, 1, 0, 0, 0,
72, 73, 3, 32, 16, 0, 73, 74, 5, 6, 0, 0, 74, 75, 3, 30, 15, 0, 75, 149,
1, 0, 0, 0, 76, 77, 3, 32, 16, 0, 77, 78, 7, 0, 0, 0, 78, 79, 3, 30, 15,
0, 79, 149, 1, 0, 0, 0, 80, 81, 3, 32, 16, 0, 81, 82, 5, 9, 0, 0, 82, 83,
3, 30, 15, 0, 83, 149, 1, 0, 0, 0, 84, 85, 3, 32, 16, 0, 85, 86, 5, 10,
0, 0, 86, 87, 3, 30, 15, 0, 87, 149, 1, 0, 0, 0, 88, 89, 3, 32, 16, 0,
89, 90, 5, 11, 0, 0, 90, 91, 3, 30, 15, 0, 91, 149, 1, 0, 0, 0, 92, 93,
3, 32, 16, 0, 93, 94, 5, 12, 0, 0, 94, 95, 3, 30, 15, 0, 95, 149, 1, 0,
0, 0, 96, 97, 3, 32, 16, 0, 97, 98, 7, 1, 0, 0, 98, 99, 3, 30, 15, 0, 99,
149, 1, 0, 0, 0, 100, 101, 3, 32, 16, 0, 101, 102, 7, 2, 0, 0, 102, 103,
3, 30, 15, 0, 103, 149, 1, 0, 0, 0, 104, 105, 3, 32, 16, 0, 105, 106, 5,
17, 0, 0, 106, 107, 3, 30, 15, 0, 107, 108, 5, 23, 0, 0, 108, 109, 3, 30,
15, 0, 109, 149, 1, 0, 0, 0, 110, 111, 3, 32, 16, 0, 111, 112, 5, 22, 0,
0, 112, 113, 5, 17, 0, 0, 113, 114, 3, 30, 15, 0, 114, 115, 5, 23, 0, 0,
115, 116, 3, 30, 15, 0, 116, 149, 1, 0, 0, 0, 117, 118, 3, 32, 16, 0, 118,
119, 3, 14, 7, 0, 119, 149, 1, 0, 0, 0, 120, 121, 3, 32, 16, 0, 121, 122,
3, 16, 8, 0, 122, 149, 1, 0, 0, 0, 123, 124, 3, 32, 16, 0, 124, 125, 5,
18, 0, 0, 125, 149, 1, 0, 0, 0, 126, 127, 3, 32, 16, 0, 127, 128, 5, 22,
0, 0, 128, 129, 5, 18, 0, 0, 129, 149, 1, 0, 0, 0, 130, 131, 3, 32, 16,
0, 131, 132, 5, 19, 0, 0, 132, 133, 3, 30, 15, 0, 133, 149, 1, 0, 0, 0,
134, 135, 3, 32, 16, 0, 135, 136, 5, 22, 0, 0, 136, 137, 5, 19, 0, 0, 137,
138, 3, 30, 15, 0, 138, 149, 1, 0, 0, 0, 139, 140, 3, 32, 16, 0, 140, 141,
5, 20, 0, 0, 141, 142, 3, 30, 15, 0, 142, 149, 1, 0, 0, 0, 143, 144, 3,
32, 16, 0, 144, 145, 5, 22, 0, 0, 145, 146, 5, 20, 0, 0, 146, 147, 3, 30,
15, 0, 147, 149, 1, 0, 0, 0, 148, 72, 1, 0, 0, 0, 148, 76, 1, 0, 0, 0,
148, 80, 1, 0, 0, 0, 148, 84, 1, 0, 0, 0, 148, 88, 1, 0, 0, 0, 148, 92,
1, 0, 0, 0, 148, 96, 1, 0, 0, 0, 148, 100, 1, 0, 0, 0, 148, 104, 1, 0,
0, 0, 148, 110, 1, 0, 0, 0, 148, 117, 1, 0, 0, 0, 148, 120, 1, 0, 0, 0,
148, 123, 1, 0, 0, 0, 148, 126, 1, 0, 0, 0, 148, 130, 1, 0, 0, 0, 148,
134, 1, 0, 0, 0, 148, 139, 1, 0, 0, 0, 148, 143, 1, 0, 0, 0, 149, 13, 1,
0, 0, 0, 150, 151, 5, 21, 0, 0, 151, 152, 5, 1, 0, 0, 152, 153, 3, 18,
9, 0, 153, 154, 5, 2, 0, 0, 154, 161, 1, 0, 0, 0, 155, 156, 5, 21, 0, 0,
156, 157, 5, 3, 0, 0, 157, 158, 3, 18, 9, 0, 158, 159, 5, 4, 0, 0, 159,
161, 1, 0, 0, 0, 160, 150, 1, 0, 0, 0, 160, 155, 1, 0, 0, 0, 161, 15, 1,
0, 0, 0, 162, 163, 5, 22, 0, 0, 163, 164, 5, 21, 0, 0, 164, 165, 5, 1,
0, 0, 165, 166, 3, 18, 9, 0, 166, 167, 5, 2, 0, 0, 167, 175, 1, 0, 0, 0,
168, 169, 5, 22, 0, 0, 169, 170, 5, 21, 0, 0, 170, 171, 5, 3, 0, 0, 171,
172, 3, 18, 9, 0, 172, 173, 5, 4, 0, 0, 173, 175, 1, 0, 0, 0, 174, 162,
1, 0, 0, 0, 174, 168, 1, 0, 0, 0, 175, 17, 1, 0, 0, 0, 176, 181, 3, 30,
15, 0, 177, 178, 5, 5, 0, 0, 178, 180, 3, 30, 15, 0, 179, 177, 1, 0, 0,
0, 180, 183, 1, 0, 0, 0, 181, 179, 1, 0, 0, 0, 181, 182, 1, 0, 0, 0, 182,
19, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 184, 185, 7, 3, 0, 0, 185, 21, 1,
0, 0, 0, 186, 187, 7, 4, 0, 0, 187, 188, 5, 1, 0, 0, 188, 189, 3, 24, 12,
0, 189, 190, 5, 2, 0, 0, 190, 23, 1, 0, 0, 0, 191, 196, 3, 26, 13, 0, 192,
193, 5, 5, 0, 0, 193, 195, 3, 26, 13, 0, 194, 192, 1, 0, 0, 0, 195, 198,
1, 0, 0, 0, 196, 194, 1, 0, 0, 0, 196, 197, 1, 0, 0, 0, 197, 25, 1, 0,
0, 0, 198, 196, 1, 0, 0, 0, 199, 203, 3, 32, 16, 0, 200, 203, 3, 30, 15,
0, 201, 203, 3, 28, 14, 0, 202, 199, 1, 0, 0, 0, 202, 200, 1, 0, 0, 0,
202, 201, 1, 0, 0, 0, 203, 27, 1, 0, 0, 0, 204, 205, 5, 3, 0, 0, 205, 206,
3, 18, 9, 0, 206, 207, 5, 4, 0, 0, 207, 29, 1, 0, 0, 0, 208, 209, 7, 5,
0, 0, 209, 31, 1, 0, 0, 0, 210, 211, 5, 31, 0, 0, 211, 33, 1, 0, 0, 0,
11, 44, 51, 53, 57, 70, 148, 160, 174, 181, 196, 202,
}
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
@ -802,7 +802,7 @@ func (p *FilterQueryParser) AndExpression() (localctx IAndExpressionContext) {
}
_la = p.GetTokenStream().LA(1)
for (int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&12058624002) != 0 {
for (int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&12863930370) != 0 {
p.SetState(51)
p.GetErrorHandler().Sync(p)
if p.HasError() {
@ -824,7 +824,7 @@ func (p *FilterQueryParser) AndExpression() (localctx IAndExpressionContext) {
p.UnaryExpression()
}
case FilterQueryParserLPAREN, FilterQueryParserNOT, FilterQueryParserHAS, FilterQueryParserHASANY, FilterQueryParserHASALL, FilterQueryParserQUOTED_TEXT, FilterQueryParserKEY, FilterQueryParserFREETEXT:
case FilterQueryParserLPAREN, FilterQueryParserNOT, FilterQueryParserHAS, FilterQueryParserHASANY, FilterQueryParserHASALL, FilterQueryParserBOOL, FilterQueryParserNUMBER, FilterQueryParserQUOTED_TEXT, FilterQueryParserKEY, FilterQueryParserFREETEXT:
{
p.SetState(50)
p.UnaryExpression()
@ -1010,6 +1010,7 @@ type IPrimaryContext interface {
FunctionCall() IFunctionCallContext
FullText() IFullTextContext
Key() IKeyContext
Value() IValueContext
// IsPrimaryContext differentiates from other interfaces.
IsPrimaryContext()
@ -1135,6 +1136,22 @@ func (s *PrimaryContext) Key() IKeyContext {
return t.(IKeyContext)
}
func (s *PrimaryContext) Value() IValueContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IValueContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IValueContext)
}
func (s *PrimaryContext) GetRuleContext() antlr.RuleContext {
return s
}
@ -1168,7 +1185,7 @@ func (s *PrimaryContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
func (p *FilterQueryParser) Primary() (localctx IPrimaryContext) {
localctx = NewPrimaryContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 10, FilterQueryParserRULE_primary)
p.SetState(69)
p.SetState(70)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
@ -1226,6 +1243,13 @@ func (p *FilterQueryParser) Primary() (localctx IPrimaryContext) {
p.Key()
}
case 6:
p.EnterOuterAlt(localctx, 6)
{
p.SetState(69)
p.Value()
}
case antlr.ATNInvalidAltNumber:
goto errorExit
}
@ -1502,7 +1526,7 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
p.EnterRule(localctx, 12, FilterQueryParserRULE_comparison)
var _la int
p.SetState(147)
p.SetState(148)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
@ -1512,11 +1536,11 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
case 1:
p.EnterOuterAlt(localctx, 1)
{
p.SetState(71)
p.SetState(72)
p.Key()
}
{
p.SetState(72)
p.SetState(73)
p.Match(FilterQueryParserEQUALS)
if p.HasError() {
// Recognition error - abort rule
@ -1524,18 +1548,18 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
}
{
p.SetState(73)
p.SetState(74)
p.Value()
}
case 2:
p.EnterOuterAlt(localctx, 2)
{
p.SetState(75)
p.SetState(76)
p.Key()
}
{
p.SetState(76)
p.SetState(77)
_la = p.GetTokenStream().LA(1)
if !(_la == FilterQueryParserNOT_EQUALS || _la == FilterQueryParserNEQ) {
@ -1546,18 +1570,18 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
}
{
p.SetState(77)
p.SetState(78)
p.Value()
}
case 3:
p.EnterOuterAlt(localctx, 3)
{
p.SetState(79)
p.SetState(80)
p.Key()
}
{
p.SetState(80)
p.SetState(81)
p.Match(FilterQueryParserLT)
if p.HasError() {
// Recognition error - abort rule
@ -1565,18 +1589,18 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
}
{
p.SetState(81)
p.SetState(82)
p.Value()
}
case 4:
p.EnterOuterAlt(localctx, 4)
{
p.SetState(83)
p.SetState(84)
p.Key()
}
{
p.SetState(84)
p.SetState(85)
p.Match(FilterQueryParserLE)
if p.HasError() {
// Recognition error - abort rule
@ -1584,18 +1608,18 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
}
{
p.SetState(85)
p.SetState(86)
p.Value()
}
case 5:
p.EnterOuterAlt(localctx, 5)
{
p.SetState(87)
p.SetState(88)
p.Key()
}
{
p.SetState(88)
p.SetState(89)
p.Match(FilterQueryParserGT)
if p.HasError() {
// Recognition error - abort rule
@ -1603,18 +1627,18 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
}
{
p.SetState(89)
p.SetState(90)
p.Value()
}
case 6:
p.EnterOuterAlt(localctx, 6)
{
p.SetState(91)
p.SetState(92)
p.Key()
}
{
p.SetState(92)
p.SetState(93)
p.Match(FilterQueryParserGE)
if p.HasError() {
// Recognition error - abort rule
@ -1622,18 +1646,18 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
}
{
p.SetState(93)
p.SetState(94)
p.Value()
}
case 7:
p.EnterOuterAlt(localctx, 7)
{
p.SetState(95)
p.SetState(96)
p.Key()
}
{
p.SetState(96)
p.SetState(97)
_la = p.GetTokenStream().LA(1)
if !(_la == FilterQueryParserLIKE || _la == FilterQueryParserILIKE) {
@ -1644,18 +1668,18 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
}
{
p.SetState(97)
p.SetState(98)
p.Value()
}
case 8:
p.EnterOuterAlt(localctx, 8)
{
p.SetState(99)
p.SetState(100)
p.Key()
}
{
p.SetState(100)
p.SetState(101)
_la = p.GetTokenStream().LA(1)
if !(_la == FilterQueryParserNOT_LIKE || _la == FilterQueryParserNOT_ILIKE) {
@ -1666,18 +1690,18 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
}
{
p.SetState(101)
p.SetState(102)
p.Value()
}
case 9:
p.EnterOuterAlt(localctx, 9)
{
p.SetState(103)
p.SetState(104)
p.Key()
}
{
p.SetState(104)
p.SetState(105)
p.Match(FilterQueryParserBETWEEN)
if p.HasError() {
// Recognition error - abort rule
@ -1685,11 +1709,11 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
}
{
p.SetState(105)
p.SetState(106)
p.Value()
}
{
p.SetState(106)
p.SetState(107)
p.Match(FilterQueryParserAND)
if p.HasError() {
// Recognition error - abort rule
@ -1697,18 +1721,18 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
}
{
p.SetState(107)
p.SetState(108)
p.Value()
}
case 10:
p.EnterOuterAlt(localctx, 10)
{
p.SetState(109)
p.SetState(110)
p.Key()
}
{
p.SetState(110)
p.SetState(111)
p.Match(FilterQueryParserNOT)
if p.HasError() {
// Recognition error - abort rule
@ -1716,7 +1740,7 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
}
{
p.SetState(111)
p.SetState(112)
p.Match(FilterQueryParserBETWEEN)
if p.HasError() {
// Recognition error - abort rule
@ -1724,11 +1748,11 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
}
{
p.SetState(112)
p.SetState(113)
p.Value()
}
{
p.SetState(113)
p.SetState(114)
p.Match(FilterQueryParserAND)
if p.HasError() {
// Recognition error - abort rule
@ -1736,40 +1760,40 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
}
{
p.SetState(114)
p.SetState(115)
p.Value()
}
case 11:
p.EnterOuterAlt(localctx, 11)
{
p.SetState(116)
p.SetState(117)
p.Key()
}
{
p.SetState(117)
p.SetState(118)
p.InClause()
}
case 12:
p.EnterOuterAlt(localctx, 12)
{
p.SetState(119)
p.SetState(120)
p.Key()
}
{
p.SetState(120)
p.SetState(121)
p.NotInClause()
}
case 13:
p.EnterOuterAlt(localctx, 13)
{
p.SetState(122)
p.SetState(123)
p.Key()
}
{
p.SetState(123)
p.SetState(124)
p.Match(FilterQueryParserEXISTS)
if p.HasError() {
// Recognition error - abort rule
@ -1780,11 +1804,11 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
case 14:
p.EnterOuterAlt(localctx, 14)
{
p.SetState(125)
p.SetState(126)
p.Key()
}
{
p.SetState(126)
p.SetState(127)
p.Match(FilterQueryParserNOT)
if p.HasError() {
// Recognition error - abort rule
@ -1792,7 +1816,7 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
}
{
p.SetState(127)
p.SetState(128)
p.Match(FilterQueryParserEXISTS)
if p.HasError() {
// Recognition error - abort rule
@ -1803,11 +1827,11 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
case 15:
p.EnterOuterAlt(localctx, 15)
{
p.SetState(129)
p.SetState(130)
p.Key()
}
{
p.SetState(130)
p.SetState(131)
p.Match(FilterQueryParserREGEXP)
if p.HasError() {
// Recognition error - abort rule
@ -1815,27 +1839,19 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
}
{
p.SetState(131)
p.SetState(132)
p.Value()
}
case 16:
p.EnterOuterAlt(localctx, 16)
{
p.SetState(133)
p.SetState(134)
p.Key()
}
{
p.SetState(134)
p.Match(FilterQueryParserNOT)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(135)
p.Match(FilterQueryParserREGEXP)
p.Match(FilterQueryParserNOT)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
@ -1843,17 +1859,25 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
{
p.SetState(136)
p.Match(FilterQueryParserREGEXP)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(137)
p.Value()
}
case 17:
p.EnterOuterAlt(localctx, 17)
{
p.SetState(138)
p.SetState(139)
p.Key()
}
{
p.SetState(139)
p.SetState(140)
p.Match(FilterQueryParserCONTAINS)
if p.HasError() {
// Recognition error - abort rule
@ -1861,18 +1885,18 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
}
{
p.SetState(140)
p.SetState(141)
p.Value()
}
case 18:
p.EnterOuterAlt(localctx, 18)
{
p.SetState(142)
p.SetState(143)
p.Key()
}
{
p.SetState(143)
p.SetState(144)
p.Match(FilterQueryParserNOT)
if p.HasError() {
// Recognition error - abort rule
@ -1880,7 +1904,7 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
}
{
p.SetState(144)
p.SetState(145)
p.Match(FilterQueryParserCONTAINS)
if p.HasError() {
// Recognition error - abort rule
@ -1888,7 +1912,7 @@ func (p *FilterQueryParser) Comparison() (localctx IComparisonContext) {
}
}
{
p.SetState(145)
p.SetState(146)
p.Value()
}
@ -2029,7 +2053,7 @@ func (s *InClauseContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
func (p *FilterQueryParser) InClause() (localctx IInClauseContext) {
localctx = NewInClauseContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 14, FilterQueryParserRULE_inClause)
p.SetState(159)
p.SetState(160)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
@ -2039,7 +2063,7 @@ func (p *FilterQueryParser) InClause() (localctx IInClauseContext) {
case 1:
p.EnterOuterAlt(localctx, 1)
{
p.SetState(149)
p.SetState(150)
p.Match(FilterQueryParserIN)
if p.HasError() {
// Recognition error - abort rule
@ -2047,7 +2071,7 @@ func (p *FilterQueryParser) InClause() (localctx IInClauseContext) {
}
}
{
p.SetState(150)
p.SetState(151)
p.Match(FilterQueryParserLPAREN)
if p.HasError() {
// Recognition error - abort rule
@ -2055,11 +2079,11 @@ func (p *FilterQueryParser) InClause() (localctx IInClauseContext) {
}
}
{
p.SetState(151)
p.SetState(152)
p.ValueList()
}
{
p.SetState(152)
p.SetState(153)
p.Match(FilterQueryParserRPAREN)
if p.HasError() {
// Recognition error - abort rule
@ -2070,7 +2094,7 @@ func (p *FilterQueryParser) InClause() (localctx IInClauseContext) {
case 2:
p.EnterOuterAlt(localctx, 2)
{
p.SetState(154)
p.SetState(155)
p.Match(FilterQueryParserIN)
if p.HasError() {
// Recognition error - abort rule
@ -2078,7 +2102,7 @@ func (p *FilterQueryParser) InClause() (localctx IInClauseContext) {
}
}
{
p.SetState(155)
p.SetState(156)
p.Match(FilterQueryParserLBRACK)
if p.HasError() {
// Recognition error - abort rule
@ -2086,11 +2110,11 @@ func (p *FilterQueryParser) InClause() (localctx IInClauseContext) {
}
}
{
p.SetState(156)
p.SetState(157)
p.ValueList()
}
{
p.SetState(157)
p.SetState(158)
p.Match(FilterQueryParserRBRACK)
if p.HasError() {
// Recognition error - abort rule
@ -2240,7 +2264,7 @@ func (s *NotInClauseContext) Accept(visitor antlr.ParseTreeVisitor) interface{}
func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
localctx = NewNotInClauseContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 16, FilterQueryParserRULE_notInClause)
p.SetState(173)
p.SetState(174)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
@ -2250,7 +2274,7 @@ func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
case 1:
p.EnterOuterAlt(localctx, 1)
{
p.SetState(161)
p.SetState(162)
p.Match(FilterQueryParserNOT)
if p.HasError() {
// Recognition error - abort rule
@ -2258,7 +2282,7 @@ func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
}
}
{
p.SetState(162)
p.SetState(163)
p.Match(FilterQueryParserIN)
if p.HasError() {
// Recognition error - abort rule
@ -2266,7 +2290,7 @@ func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
}
}
{
p.SetState(163)
p.SetState(164)
p.Match(FilterQueryParserLPAREN)
if p.HasError() {
// Recognition error - abort rule
@ -2274,11 +2298,11 @@ func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
}
}
{
p.SetState(164)
p.SetState(165)
p.ValueList()
}
{
p.SetState(165)
p.SetState(166)
p.Match(FilterQueryParserRPAREN)
if p.HasError() {
// Recognition error - abort rule
@ -2289,7 +2313,7 @@ func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
case 2:
p.EnterOuterAlt(localctx, 2)
{
p.SetState(167)
p.SetState(168)
p.Match(FilterQueryParserNOT)
if p.HasError() {
// Recognition error - abort rule
@ -2297,7 +2321,7 @@ func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
}
}
{
p.SetState(168)
p.SetState(169)
p.Match(FilterQueryParserIN)
if p.HasError() {
// Recognition error - abort rule
@ -2305,7 +2329,7 @@ func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
}
}
{
p.SetState(169)
p.SetState(170)
p.Match(FilterQueryParserLBRACK)
if p.HasError() {
// Recognition error - abort rule
@ -2313,11 +2337,11 @@ func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
}
}
{
p.SetState(170)
p.SetState(171)
p.ValueList()
}
{
p.SetState(171)
p.SetState(172)
p.Match(FilterQueryParserRBRACK)
if p.HasError() {
// Recognition error - abort rule
@ -2477,10 +2501,10 @@ func (p *FilterQueryParser) ValueList() (localctx IValueListContext) {
p.EnterOuterAlt(localctx, 1)
{
p.SetState(175)
p.SetState(176)
p.Value()
}
p.SetState(180)
p.SetState(181)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
@ -2489,7 +2513,7 @@ func (p *FilterQueryParser) ValueList() (localctx IValueListContext) {
for _la == FilterQueryParserCOMMA {
{
p.SetState(176)
p.SetState(177)
p.Match(FilterQueryParserCOMMA)
if p.HasError() {
// Recognition error - abort rule
@ -2497,11 +2521,11 @@ func (p *FilterQueryParser) ValueList() (localctx IValueListContext) {
}
}
{
p.SetState(177)
p.SetState(178)
p.Value()
}
p.SetState(182)
p.SetState(183)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
@ -2614,7 +2638,7 @@ func (p *FilterQueryParser) FullText() (localctx IFullTextContext) {
p.EnterOuterAlt(localctx, 1)
{
p.SetState(183)
p.SetState(184)
_la = p.GetTokenStream().LA(1)
if !(_la == FilterQueryParserQUOTED_TEXT || _la == FilterQueryParserFREETEXT) {
@ -2762,7 +2786,7 @@ func (p *FilterQueryParser) FunctionCall() (localctx IFunctionCallContext) {
p.EnterOuterAlt(localctx, 1)
{
p.SetState(185)
p.SetState(186)
_la = p.GetTokenStream().LA(1)
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&234881024) != 0) {
@ -2773,7 +2797,7 @@ func (p *FilterQueryParser) FunctionCall() (localctx IFunctionCallContext) {
}
}
{
p.SetState(186)
p.SetState(187)
p.Match(FilterQueryParserLPAREN)
if p.HasError() {
// Recognition error - abort rule
@ -2781,11 +2805,11 @@ func (p *FilterQueryParser) FunctionCall() (localctx IFunctionCallContext) {
}
}
{
p.SetState(187)
p.SetState(188)
p.FunctionParamList()
}
{
p.SetState(188)
p.SetState(189)
p.Match(FilterQueryParserRPAREN)
if p.HasError() {
// Recognition error - abort rule
@ -2941,10 +2965,10 @@ func (p *FilterQueryParser) FunctionParamList() (localctx IFunctionParamListCont
p.EnterOuterAlt(localctx, 1)
{
p.SetState(190)
p.SetState(191)
p.FunctionParam()
}
p.SetState(195)
p.SetState(196)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
@ -2953,7 +2977,7 @@ func (p *FilterQueryParser) FunctionParamList() (localctx IFunctionParamListCont
for _la == FilterQueryParserCOMMA {
{
p.SetState(191)
p.SetState(192)
p.Match(FilterQueryParserCOMMA)
if p.HasError() {
// Recognition error - abort rule
@ -2961,11 +2985,11 @@ func (p *FilterQueryParser) FunctionParamList() (localctx IFunctionParamListCont
}
}
{
p.SetState(192)
p.SetState(193)
p.FunctionParam()
}
p.SetState(197)
p.SetState(198)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
@ -3115,7 +3139,7 @@ func (s *FunctionParamContext) Accept(visitor antlr.ParseTreeVisitor) interface{
func (p *FilterQueryParser) FunctionParam() (localctx IFunctionParamContext) {
localctx = NewFunctionParamContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 26, FilterQueryParserRULE_functionParam)
p.SetState(201)
p.SetState(202)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
@ -3125,21 +3149,21 @@ func (p *FilterQueryParser) FunctionParam() (localctx IFunctionParamContext) {
case 1:
p.EnterOuterAlt(localctx, 1)
{
p.SetState(198)
p.SetState(199)
p.Key()
}
case 2:
p.EnterOuterAlt(localctx, 2)
{
p.SetState(199)
p.SetState(200)
p.Value()
}
case 3:
p.EnterOuterAlt(localctx, 3)
{
p.SetState(200)
p.SetState(201)
p.Array()
}
@ -3267,7 +3291,7 @@ func (p *FilterQueryParser) Array() (localctx IArrayContext) {
p.EnterRule(localctx, 28, FilterQueryParserRULE_array)
p.EnterOuterAlt(localctx, 1)
{
p.SetState(203)
p.SetState(204)
p.Match(FilterQueryParserLBRACK)
if p.HasError() {
// Recognition error - abort rule
@ -3275,11 +3299,11 @@ func (p *FilterQueryParser) Array() (localctx IArrayContext) {
}
}
{
p.SetState(204)
p.SetState(205)
p.ValueList()
}
{
p.SetState(205)
p.SetState(206)
p.Match(FilterQueryParserRBRACK)
if p.HasError() {
// Recognition error - abort rule
@ -3402,7 +3426,7 @@ func (p *FilterQueryParser) Value() (localctx IValueContext) {
p.EnterOuterAlt(localctx, 1)
{
p.SetState(207)
p.SetState(208)
_la = p.GetTokenStream().LA(1)
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&4026531840) != 0) {
@ -3511,7 +3535,7 @@ func (p *FilterQueryParser) Key() (localctx IKeyContext) {
p.EnterRule(localctx, 32, FilterQueryParserRULE_key)
p.EnterOuterAlt(localctx, 1)
{
p.SetState(209)
p.SetState(210)
p.Match(FilterQueryParserKEY)
if p.HasError() {
// Recognition error - abort rule


@ -0,0 +1,281 @@
package querybuilder
import (
"github.com/SigNoz/signoz/pkg/valuer"
)
var (
AggreFuncMap = map[valuer.String]AggrFunc{}
)
type AggrFunc struct {
Name valuer.String
FuncName string
Aliases []valuer.String
RequireArgs bool
FuncCombinator bool
Rate bool
MinArgs int
MaxArgs int
}
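The struct above is the registry entry for one aggregation: Name and Aliases are the user-facing spellings, FuncName is the underlying function emitted into the query, FuncCombinator marks the ...If variants, Rate marks rate-style aggregations, and MinArgs/MaxArgs bound the accepted argument count. As a hedged illustration of how those bounds are meant to be applied, here is a minimal sketch; validateArgCount is a hypothetical helper written for this example (it would also need "fmt" imported) and is not part of the commit:

// validateArgCount is a hypothetical helper (illustration only, not part of this
// commit). It shows how RequireArgs, MinArgs and MaxArgs are intended to gate an
// aggregation expression such as countDistinctIf(body, level = 'error').
func validateArgCount(fn AggrFunc, argCount int) error {
	if fn.RequireArgs && argCount == 0 {
		return fmt.Errorf("%s requires at least one argument", fn.FuncName)
	}
	if argCount < fn.MinArgs || argCount > fn.MaxArgs {
		return fmt.Errorf("%s accepts between %d and %d arguments, got %d",
			fn.FuncName, fn.MinArgs, fn.MaxArgs, argCount)
	}
	return nil
}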
var (
AggrFuncCount = AggrFunc{
Name: valuer.NewString("count"),
FuncName: "count",
RequireArgs: false, MinArgs: 0, MaxArgs: 1,
}
AggrFuncCountIf = AggrFunc{
Name: valuer.NewString("countif"),
FuncName: "countIf",
Aliases: []valuer.String{valuer.NewString("count_if")},
RequireArgs: true, FuncCombinator: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncCountDistinct = AggrFunc{
Name: valuer.NewString("countdistinct"),
FuncName: "countDistinct",
Aliases: []valuer.String{valuer.NewString("count_distinct")},
RequireArgs: true, MinArgs: 1, MaxArgs: 10,
}
AggrFuncCountDistinctIf = AggrFunc{
Name: valuer.NewString("countdistinctif"),
FuncName: "countDistinctIf",
Aliases: []valuer.String{valuer.NewString("count_distinct_if")},
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
}
AggrFuncSum = AggrFunc{
Name: valuer.NewString("sum"),
FuncName: "sum",
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncSumIf = AggrFunc{
Name: valuer.NewString("sumif"),
FuncName: "sumIf",
Aliases: []valuer.String{valuer.NewString("sum_if")},
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
}
AggrFuncAvg = AggrFunc{
Name: valuer.NewString("avg"),
FuncName: "avg",
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncAvgIf = AggrFunc{
Name: valuer.NewString("avgif"),
FuncName: "avgIf",
Aliases: []valuer.String{valuer.NewString("avg_if")},
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
}
AggrFuncMin = AggrFunc{
Name: valuer.NewString("min"),
FuncName: "min",
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncMinIf = AggrFunc{
Name: valuer.NewString("minif"),
FuncName: "minIf",
Aliases: []valuer.String{valuer.NewString("min_if")},
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
}
AggrFuncMax = AggrFunc{
Name: valuer.NewString("max"),
FuncName: "max",
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncMaxIf = AggrFunc{
Name: valuer.NewString("maxif"),
FuncName: "maxIf",
Aliases: []valuer.String{valuer.NewString("max_if")},
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
}
AggrFuncP05 = AggrFunc{
Name: valuer.NewString("p05"),
FuncName: "quantile(0.05)",
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncP05IF = AggrFunc{
Name: valuer.NewString("p05if"),
FuncName: "quantileIf(0.05)",
Aliases: []valuer.String{valuer.NewString("p05_if")},
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
}
AggrFuncP10 = AggrFunc{
Name: valuer.NewString("p10"),
FuncName: "quantile(0.10)",
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncP10IF = AggrFunc{
Name: valuer.NewString("p10if"),
FuncName: "quantileIf(0.10)",
Aliases: []valuer.String{valuer.NewString("p10_if")},
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
}
AggrFuncP20 = AggrFunc{
Name: valuer.NewString("p20"),
FuncName: "quantile(0.20)",
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncP20IF = AggrFunc{
Name: valuer.NewString("p20if"),
FuncName: "quantileIf(0.20)",
Aliases: []valuer.String{valuer.NewString("p20_if")},
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
}
AggrFuncP25 = AggrFunc{
Name: valuer.NewString("p25"),
FuncName: "quantile(0.25)",
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncP25IF = AggrFunc{
Name: valuer.NewString("p25if"),
FuncName: "quantileIf(0.25)",
Aliases: []valuer.String{valuer.NewString("p25_if")},
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
}
AggrFuncP50 = AggrFunc{
Name: valuer.NewString("p50"),
FuncName: "quantile(0.50)",
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncP50IF = AggrFunc{
Name: valuer.NewString("p50if"),
FuncName: "quantileIf(0.50)",
Aliases: []valuer.String{valuer.NewString("p50_if")},
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
}
AggrFuncP75 = AggrFunc{
Name: valuer.NewString("p75"),
FuncName: "quantile(0.75)",
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncP75IF = AggrFunc{
Name: valuer.NewString("p75if"),
FuncName: "quantileIf(0.75)",
Aliases: []valuer.String{valuer.NewString("p75_if")},
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
}
AggrFuncP90 = AggrFunc{
Name: valuer.NewString("p90"),
FuncName: "quantile(0.90)",
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncP90IF = AggrFunc{
Name: valuer.NewString("p90if"),
FuncName: "quantileIf(0.90)",
Aliases: []valuer.String{valuer.NewString("p90_if")},
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
}
AggrFuncP95 = AggrFunc{
Name: valuer.NewString("p95"),
FuncName: "quantile(0.95)",
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncP95IF = AggrFunc{
Name: valuer.NewString("p95if"),
FuncName: "quantileIf(0.95)",
Aliases: []valuer.String{valuer.NewString("p95_if")},
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
}
AggrFuncP99 = AggrFunc{
Name: valuer.NewString("p99"),
FuncName: "quantile(0.99)",
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncP99IF = AggrFunc{
Name: valuer.NewString("p99if"),
FuncName: "quantileIf(0.99)",
Aliases: []valuer.String{valuer.NewString("p99_if")},
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
}
AggrFuncP999 = AggrFunc{
Name: valuer.NewString("p999"),
FuncName: "quantile(0.999)",
RequireArgs: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncP999IF = AggrFunc{
Name: valuer.NewString("p999if"),
FuncName: "quantileIf(0.999)",
Aliases: []valuer.String{valuer.NewString("p999_if")},
RequireArgs: true, FuncCombinator: true, MinArgs: 2, MaxArgs: 2,
}
AggrFuncRate = AggrFunc{
Name: valuer.NewString("rate"),
FuncName: "count",
RequireArgs: true, Rate: true, MinArgs: 0, MaxArgs: 1,
}
AggrFuncRateIf = AggrFunc{
Name: valuer.NewString("rateif"),
FuncName: "count",
Aliases: []valuer.String{valuer.NewString("rate_if")},
RequireArgs: true, Rate: true, FuncCombinator: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncRateSum = AggrFunc{
Name: valuer.NewString("rate_sum"),
FuncName: "sum",
RequireArgs: true, Rate: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncRateAvg = AggrFunc{
Name: valuer.NewString("rate_avg"),
FuncName: "avg",
RequireArgs: true, Rate: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncRateMin = AggrFunc{
Name: valuer.NewString("rate_min"),
FuncName: "min",
RequireArgs: true, Rate: true, MinArgs: 1, MaxArgs: 1,
}
AggrFuncRateMax = AggrFunc{
Name: valuer.NewString("rate_max"),
FuncName: "max",
RequireArgs: true, Rate: true, MinArgs: 1, MaxArgs: 1,
}
)
func init() {
var aggFuncs = []AggrFunc{
AggrFuncCount,
AggrFuncCountIf,
AggrFuncCountDistinct,
AggrFuncCountDistinctIf,
AggrFuncSum,
AggrFuncSumIf,
AggrFuncAvg,
AggrFuncAvgIf,
AggrFuncMin,
AggrFuncMinIf,
AggrFuncMax,
AggrFuncMaxIf,
AggrFuncP05,
AggrFuncP05IF,
AggrFuncP10,
AggrFuncP10IF,
AggrFuncP20,
AggrFuncP20IF,
AggrFuncP25,
AggrFuncP25IF,
AggrFuncP50,
AggrFuncP50IF,
AggrFuncP75,
AggrFuncP75IF,
AggrFuncP90,
AggrFuncP90IF,
AggrFuncP95,
AggrFuncP95IF,
AggrFuncP99,
AggrFuncP99IF,
AggrFuncP999,
AggrFuncP999IF,
AggrFuncRate,
AggrFuncRateIf,
AggrFuncRateSum,
AggrFuncRateAvg,
AggrFuncRateMin,
AggrFuncRateMax,
}
for _, aggFunc := range aggFuncs {
AggreFuncMap[aggFunc.Name] = aggFunc
for _, alias := range aggFunc.Aliases {
AggreFuncMap[alias] = aggFunc
}
}
}
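
For illustration, a minimal lookup sketch against the registry above (the standalone main package is hypothetical; AggreFuncMap and valuer.NewString are taken from this change). Canonical names and aliases resolve to the same entry.

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/valuer"
)

func main() {
	// "count_distinct" is registered as an alias of "countdistinct" in init().
	for _, name := range []string{"countdistinct", "count_distinct", "p95"} {
		fn, ok := querybuilder.AggreFuncMap[valuer.NewString(name)]
		if !ok {
			fmt.Printf("%s: not registered\n", name)
			continue
		}
		fmt.Printf("%s -> %s (combinator=%t, args %d..%d)\n",
			name, fn.FuncName, fn.FuncCombinator, fn.MinArgs, fn.MaxArgs)
	}
}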


@ -0,0 +1,245 @@
package querybuilder
import (
"context"
"fmt"
"strings"
chparser "github.com/AfterShip/clickhouse-sql-parser/parser"
"github.com/SigNoz/signoz/pkg/errors"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/huandu/go-sqlbuilder"
)
type AggExprRewriterOptions struct {
FieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
FullTextColumn *telemetrytypes.TelemetryFieldKey
FieldMapper qbtypes.FieldMapper
ConditionBuilder qbtypes.ConditionBuilder
JsonBodyPrefix string
JsonKeyToKey qbtypes.JsonKeyToFieldFunc
RateInterval uint64
}
type aggExprRewriter struct {
opts AggExprRewriterOptions
}
func NewAggExprRewriter(opts AggExprRewriterOptions) *aggExprRewriter {
return &aggExprRewriter{opts: opts}
}
// Rewrite parses the given aggregation expression, maps the column and condition
// to valid data source column and condition expressions, and returns the rewritten
// expression along with the args when a parametric aggregation function is used.
func (r *aggExprRewriter) Rewrite(expr string) (string, []any, error) {
wrapped := fmt.Sprintf("SELECT %s", expr)
p := chparser.NewParser(wrapped)
stmts, err := p.ParseStmts()
if err != nil {
return "", nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to parse aggregation expression %q", expr)
}
if len(stmts) == 0 {
return "", nil, errors.NewInternalf(errors.CodeInternal, "no statements found for %q", expr)
}
sel, ok := stmts[0].(*chparser.SelectQuery)
if !ok {
return "", nil, errors.NewInternalf(errors.CodeInternal, "expected SelectQuery, got %T", stmts[0])
}
if len(sel.SelectItems) == 0 {
return "", nil, errors.NewInternalf(errors.CodeInternal, "no SELECT items for %q", expr)
}
visitor := newExprVisitor(r.opts.FieldKeys,
r.opts.FullTextColumn,
r.opts.FieldMapper,
r.opts.ConditionBuilder,
r.opts.JsonBodyPrefix,
r.opts.JsonKeyToKey,
)
// Rewrite the first select item (our expression)
if err := sel.SelectItems[0].Accept(visitor); err != nil {
return "", nil, err
}
// If nothing changed, return original
if !visitor.Modified {
return expr, nil, nil
}
if visitor.isRate {
return fmt.Sprintf("%s/%d", sel.SelectItems[0].String(), r.opts.RateInterval), visitor.chArgs, nil
}
return sel.SelectItems[0].String(), visitor.chArgs, nil
}
// RewriteMultiple rewrites a slice of expressions.
func (r *aggExprRewriter) RewriteMultiple(
exprs []string,
) ([]string, [][]any, error) {
out := make([]string, len(exprs))
var errs []error
var chArgsList [][]any
for i, e := range exprs {
w, chArgs, err := r.Rewrite(e)
if err != nil {
errs = append(errs, err)
out[i] = e
} else {
out[i] = w
chArgsList = append(chArgsList, chArgs)
}
}
if len(errs) > 0 {
return out, nil, errors.Join(errs...)
}
return out, chArgsList, nil
}
// exprVisitor walks FunctionExpr nodes and applies the mappers.
type exprVisitor struct {
chparser.DefaultASTVisitor
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
fullTextColumn *telemetrytypes.TelemetryFieldKey
fieldMapper qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
Modified bool
chArgs []any
isRate bool
}
func newExprVisitor(
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *exprVisitor {
return &exprVisitor{
fieldKeys: fieldKeys,
fullTextColumn: fullTextColumn,
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
jsonBodyPrefix: jsonBodyPrefix,
jsonKeyToKey: jsonKeyToKey,
}
}
// VisitFunctionExpr is invoked for each function call in the AST.
func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
name := strings.ToLower(fn.Name.Name)
aggFunc, ok := AggreFuncMap[valuer.NewString(name)]
if !ok {
return nil
}
var args []chparser.Expr
if fn.Params != nil && fn.Params.Items != nil {
args = fn.Params.Items.Items
}
// if we know the aggregation function, we must ensure that the number of arguments is correct
if aggFunc.RequireArgs {
if len(args) < aggFunc.MinArgs || len(args) > aggFunc.MaxArgs {
return errors.NewInternalf(errors.CodeInternal, "invalid number of arguments for %q: %d", name, len(args))
}
}
fn.Name.Name = aggFunc.FuncName
if aggFunc.Rate {
v.isRate = true
}
// Handle *If functions with predicate + values
if aggFunc.FuncCombinator {
// Map the predicate (last argument)
origPred := args[len(args)-1].String()
whereClause, _, err := PrepareWhereClause(
origPred,
FilterExprVisitorOpts{
FieldKeys: v.fieldKeys,
FieldMapper: v.fieldMapper,
ConditionBuilder: v.conditionBuilder,
FullTextColumn: v.fullTextColumn,
JsonBodyPrefix: v.jsonBodyPrefix,
JsonKeyToKey: v.jsonKeyToKey,
},
)
if err != nil {
return err
}
newPred, chArgs := whereClause.BuildWithFlavor(sqlbuilder.ClickHouse)
newPred = strings.TrimPrefix(newPred, "WHERE")
parsedPred, err := parseFragment(newPred)
if err != nil {
return err
}
args[len(args)-1] = parsedPred
v.Modified = true
v.chArgs = chArgs
// Map each value column argument
for i := 0; i < len(args)-1; i++ {
origVal := args[i].String()
colName, err := v.fieldMapper.ColumnExpressionFor(context.Background(), &telemetrytypes.TelemetryFieldKey{Name: origVal}, v.fieldKeys)
if err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal)
}
newVal := colName
parsedVal, err := parseFragment(newVal)
if err != nil {
return err
}
args[i] = parsedVal
v.Modified = true
}
} else {
// Non-If functions: map every argument as a column/value
for i, arg := range args {
orig := arg.String()
colName, err := v.fieldMapper.ColumnExpressionFor(context.Background(), &telemetrytypes.TelemetryFieldKey{Name: orig}, v.fieldKeys)
if err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", orig)
}
newCol := colName
parsed, err := parseFragment(newCol)
if err != nil {
return err
}
args[i] = parsed
v.Modified = true
}
if aggFunc.Rate {
v.Modified = true
}
}
return nil
}
// parseFragment parses a SQL expression fragment by wrapping it in a SELECT.
func parseFragment(sql string) (chparser.Expr, error) {
wrapped := fmt.Sprintf("SELECT %s", sql)
p := chparser.NewParser(wrapped)
stmts, err := p.ParseStmts()
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to parse re-written expression %q", sql)
}
sel, ok := stmts[0].(*chparser.SelectQuery)
if !ok {
return nil, errors.NewInternalf(errors.CodeInternal, "unexpected statement type in re-written expression %q: %T", sql, stmts[0])
}
if len(sel.SelectItems) == 0 {
return nil, errors.NewInternalf(errors.CodeInternal, "no select items in re-written expression %q", sql)
}
return sel.SelectItems[0].Expr, nil
}
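
A minimal usage sketch of the rewriter (assumptions: a hypothetical main package, the logs field mapper and condition builder used in the tests below, an empty key map, and a 60s rate interval; expressions that reference attribute or resource keys additionally need FieldKeys populated, as in the tests).

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/telemetrylogs"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

func main() {
	fm := telemetrylogs.NewFieldMapper()
	cb := telemetrylogs.NewConditionBuilder(fm)

	rw := querybuilder.NewAggExprRewriter(querybuilder.AggExprRewriterOptions{
		FieldMapper:      fm,
		ConditionBuilder: cb,
		FieldKeys:        map[string][]*telemetrytypes.TelemetryFieldKey{},
		JsonBodyPrefix:   "body",
		JsonKeyToKey:     telemetrylogs.GetBodyJSONKey,
		RateInterval:     60,
	})

	// count() needs no rewriting; rate() is rewritten to count()/60
	// (the count divided by the configured rate interval).
	for _, e := range []string{"count()", "rate()"} {
		expr, args, err := rw.Rewrite(e)
		fmt.Println(expr, args, err)
	}
}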


@ -0,0 +1,235 @@
package querybuilder
import (
"fmt"
"slices"
"sort"
"strings"
"github.com/antlr4-go/antlr/v4"
)
var skipTokens = []string{"WS", "COMMENT"}
// friendly maps SYMBOLIC_NAME -> what the user should see.
var friendly = map[string]string{
// punctuation & operators
"LPAREN": "(", "RPAREN": ")",
"LBRACK": "[", "RBRACK": "]",
"COMMA": ",",
"EQUALS": "=",
"NOT_EQUALS": "!=",
"NEQ": "<>",
"LT": "<", "LE": "<=",
"GT": ">", "GE": ">=",
// keywords / functions
"AND": "AND",
"OR": "OR",
"NOT": "NOT",
"LIKE": "LIKE", "ILIKE": "ILIKE",
"NOT_LIKE": "NOT LIKE", "NOT_ILIKE": "NOT ILIKE",
"BETWEEN": "BETWEEN", "IN": "IN", "EXISTS": "EXISTS",
"REGEXP": "REGEXP", "CONTAINS": "CONTAINS",
"HAS": "has()", "HASANY": "hasAny()", "HASALL": "hasAll()",
// literals / identifiers
"NUMBER": "number",
"BOOL": "boolean",
"QUOTED_TEXT": "quoted text",
"KEY": "field name (ex: service.name)",
}
// prettyToken returns the nicest human label for token type tType.
//
// Order of preference:
// 1. hard-coded friendly table
// 2. literal name from grammar e.g. "'('"
// 3. symbolic name e.g. AND
// 4. numeric fallback e.g. <34>
func prettyToken(p antlr.Parser, tType int) (string, bool) {
if slices.Contains(skipTokens, tokenName(p, tType)) {
return "", false
}
// symbolic name -> friendly ?
syms := p.GetSymbolicNames()
if tType >= 0 && tType < len(syms) {
if nice, ok := friendly[syms[tType]]; ok {
return nice, true
}
}
// literal name (the quoted punctuation that ANTLR generates)
lits := p.GetLiteralNames()
if tType >= 0 && tType < len(lits) && lits[tType] != "" {
return lits[tType], true
}
// symbolic name as last resort (but hide WS, EOF, …)
if tType >= 0 && tType < len(syms) && syms[tType] != "" && syms[tType] != "WS" {
return syms[tType], true
}
return "", false // tell caller to skip this entry
}
type SyntaxErr struct {
Line, Col int
TokenTxt string // offending text (or EOF)
TokenType int // offending token type
Expected []string // token names the parser still expected
RuleStack []string
Msg string
}
func (e *SyntaxErr) Error() string {
exp := ""
if len(e.Expected) > 0 {
exp = "expecting one of {" + strings.Join(e.Expected, ", ") + "}" + " but got " + e.TokenTxt
}
return fmt.Sprintf("line %d:%d %s", e.Line, e.Col, exp)
}
type Ambiguity struct {
Text string // slice of raw input that was ambiguous
Alts string // e.g. "{1, 3}"
RStack []string
}
func (a *Ambiguity) Error() string {
return fmt.Sprintf("ambiguity: %s, alts: %s", a.Text, a.Alts)
}
type ErrorListener struct {
antlr.DefaultErrorListener
SyntaxErrors []*SyntaxErr
Ambigs []*Ambiguity
}
func NewErrorListener() *ErrorListener { return &ErrorListener{} }
func (l *ErrorListener) SyntaxError(
rec antlr.Recognizer,
off any,
line, column int,
msg string,
e antlr.RecognitionException,
) {
err := &SyntaxErr{Line: line, Col: column, Msg: msg}
if tok, ok := off.(antlr.Token); ok {
if tok.GetTokenType() == antlr.TokenEOF {
err.TokenTxt = "EOF"
err.TokenType = tok.GetTokenType()
} else {
err.TokenTxt = fmt.Sprintf("'%s'", tok.GetText())
err.TokenType = tok.GetTokenType()
}
}
if p, ok := rec.(antlr.Parser); ok {
set := p.GetExpectedTokens()
// Heuristic: if KEY appears in the expected set *alongside* any literal
// value tokens, we assume it stands for a bare value. Otherwise, it stands
// for a left-hand identifier.
valueTokens := map[int]struct{}{
pGetTokenType(p, "QUOTED_TEXT"): {},
pGetTokenType(p, "NUMBER"): {},
pGetTokenType(p, "BOOL"): {},
}
hasValueLiterals := false
for _, iv := range set.GetIntervals() {
for t := iv.Start; t <= iv.Stop; t++ {
if _, ok := valueTokens[t]; ok {
hasValueLiterals = true
break
}
}
if hasValueLiterals {
break
}
}
uniq := map[string]struct{}{}
for _, iv := range set.GetIntervals() {
for t := iv.Start; t <= iv.Stop; t++ {
sym := tokenName(p, t)
if sym == "KEY" {
if !hasValueLiterals {
uniq["field name (ex: service.name)"] = struct{}{}
}
continue
}
if label, ok := prettyToken(p, t); ok {
uniq[label] = struct{}{}
}
}
}
err.Expected = make([]string, 0, len(uniq))
for k := range uniq {
err.Expected = append(err.Expected, k)
}
sort.Strings(err.Expected)
err.RuleStack = p.GetRuleInvocationStack(nil)
}
l.SyntaxErrors = append(l.SyntaxErrors, err)
}
func (l *ErrorListener) ReportAmbiguity(
rec antlr.Parser,
dfa *antlr.DFA,
startIdx, stopIdx int,
exact bool,
ambigAlts *antlr.BitSet,
configs *antlr.ATNConfigSet,
) {
if !exact {
return
}
stream := rec.GetTokenStream()
txt := textSlice(stream, startIdx, stopIdx)
l.Ambigs = append(l.Ambigs, &Ambiguity{
Text: txt,
Alts: ambigAlts.String(),
RStack: rec.GetRuleInvocationStack(nil),
})
}
func pGetTokenType(p antlr.Parser, tName string) int {
syms := p.GetSymbolicNames()
for i, sym := range syms {
if sym == tName {
return i
}
}
return -1
}
// tokenName prefers literal > symbolic > numeric.
func tokenName(p antlr.Parser, tType int) string {
lits := p.GetLiteralNames()
if tType >= 0 && tType < len(lits) && lits[tType] != "" {
return lits[tType]
}
syms := p.GetSymbolicNames()
if tType >= 0 && tType < len(syms) && syms[tType] != "" {
return syms[tType]
}
return fmt.Sprintf("<%d>", tType)
}
// textSlice pulls raw input text between two token indexes.
func textSlice(ts antlr.TokenStream, start, stop int) string {
var b strings.Builder
for i := start; i <= stop && i >= 0; i++ {
if tok := ts.Get(i); tok != nil && tok.GetTokenType() != antlr.TokenEOF {
b.WriteString(tok.GetText())
}
}
return b.String()
}
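
A small sketch of the message this listener produces for a syntax error (the field values below are made up for illustration; the format comes from SyntaxErr.Error above).

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/querybuilder"
)

func main() {
	e := &querybuilder.SyntaxErr{
		Line:     1,
		Col:      15,
		TokenTxt: "EOF",
		Expected: []string{"boolean", "number", "quoted text"},
	}
	// Prints: line 1:15 expecting one of {boolean, number, quoted text} but got EOF
	fmt.Println(e.Error())
}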


@ -8,7 +8,6 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
grammar "github.com/SigNoz/signoz/pkg/parser/grammar"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/antlr4-go/antlr/v4"
@ -16,75 +15,58 @@ import (
sqlbuilder "github.com/huandu/go-sqlbuilder"
)
// WhereClauseVisitor implements the FilterQueryVisitor interface
// filterExpressionVisitor implements the FilterQueryVisitor interface
// to convert the parsed filter expressions into ClickHouse WHERE clause
type WhereClauseVisitor struct {
fieldMapper qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
warnings []error
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
errors []error
builder *sqlbuilder.SelectBuilder
fullTextColumn *telemetrytypes.TelemetryFieldKey
type filterExpressionVisitor struct {
fieldMapper qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
warnings []string
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
errors []error
builder *sqlbuilder.SelectBuilder
fullTextColumn *telemetrytypes.TelemetryFieldKey
jsonBodyPrefix string
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
skipResourceFilter bool
}
// NewWhereClauseVisitor creates a new WhereClauseVisitor
func NewWhereClauseVisitor(
conditionBuilder qbtypes.ConditionBuilder,
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey,
builder *sqlbuilder.SelectBuilder,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
) *WhereClauseVisitor {
return &WhereClauseVisitor{
conditionBuilder: conditionBuilder,
fieldKeys: fieldKeys,
builder: builder,
fullTextColumn: fullTextColumn,
type FilterExprVisitorOpts struct {
FieldMapper qbtypes.FieldMapper
ConditionBuilder qbtypes.ConditionBuilder
FieldKeys map[string][]*telemetrytypes.TelemetryFieldKey
Builder *sqlbuilder.SelectBuilder
FullTextColumn *telemetrytypes.TelemetryFieldKey
JsonBodyPrefix string
JsonKeyToKey qbtypes.JsonKeyToFieldFunc
SkipResourceFilter bool
}
// newFilterExpressionVisitor creates a new filterExpressionVisitor
func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVisitor {
return &filterExpressionVisitor{
fieldMapper: opts.FieldMapper,
conditionBuilder: opts.ConditionBuilder,
fieldKeys: opts.FieldKeys,
builder: opts.Builder,
fullTextColumn: opts.FullTextColumn,
jsonBodyPrefix: opts.JsonBodyPrefix,
jsonKeyToKey: opts.JsonKeyToKey,
skipResourceFilter: opts.SkipResourceFilter,
}
}
type SyntaxError struct {
line, column int
msg string
}
func (e *SyntaxError) Error() string {
return fmt.Sprintf("line %d:%d %s", e.line, e.column, e.msg)
}
// ErrorListener is a custom error listener to capture syntax errors
type ErrorListener struct {
*antlr.DefaultErrorListener
Errors []error
}
// NewErrorListener creates a new error listener
func NewErrorListener() *ErrorListener {
return &ErrorListener{
DefaultErrorListener: antlr.NewDefaultErrorListener(),
Errors: []error{},
}
}
// SyntaxError captures syntax errors during parsing
func (l *ErrorListener) SyntaxError(recognizer antlr.Recognizer, offendingSymbol any, line, column int, msg string, e antlr.RecognitionException) {
l.Errors = append(l.Errors, &SyntaxError{line: line, column: column, msg: msg})
}
// PrepareWhereClause generates a ClickHouse compatible WHERE clause from the filter query
func PrepareWhereClause(
query string,
fieldKeys map[string][]*telemetrytypes.TelemetryFieldKey,
conditionBuilder qbtypes.ConditionBuilder,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
) (*sqlbuilder.WhereClause, []error, error) {
func PrepareWhereClause(query string, opts FilterExprVisitorOpts) (*sqlbuilder.WhereClause, []string, error) {
// Setup the ANTLR parsing pipeline
input := antlr.NewInputStream(query)
lexer := grammar.NewFilterQueryLexer(input)
sb := sqlbuilder.NewSelectBuilder()
if opts.Builder == nil {
sb := sqlbuilder.NewSelectBuilder()
opts.Builder = sb
}
visitor := NewWhereClauseVisitor(conditionBuilder, fieldKeys, sb, fullTextColumn)
visitor := newFilterExpressionVisitor(opts)
// Set up error handling
lexerErrorListener := NewErrorListener()
@ -101,13 +83,13 @@ func PrepareWhereClause(
tree := parser.Query()
// Handle syntax errors
if len(parserErrorListener.Errors) > 0 {
if len(parserErrorListener.SyntaxErrors) > 0 {
combinedErrors := errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"found %d syntax errors while parsing the search expression: %v",
len(parserErrorListener.Errors),
parserErrorListener.Errors,
"found %d syntax errors while parsing the filter expression: %v",
len(parserErrorListener.SyntaxErrors),
parserErrorListener.SyntaxErrors,
)
return nil, nil, combinedErrors
}
@ -133,7 +115,7 @@ func PrepareWhereClause(
}
// Visit dispatches to the specific visit method based on node type
func (v *WhereClauseVisitor) Visit(tree antlr.ParseTree) any {
func (v *filterExpressionVisitor) Visit(tree antlr.ParseTree) any {
// Handle nil nodes to prevent panic
if tree == nil {
return ""
@ -179,18 +161,18 @@ func (v *WhereClauseVisitor) Visit(tree antlr.ParseTree) any {
}
}
func (v *WhereClauseVisitor) VisitQuery(ctx *grammar.QueryContext) any {
func (v *filterExpressionVisitor) VisitQuery(ctx *grammar.QueryContext) any {
return v.Visit(ctx.Expression())
}
// VisitExpression passes through to the orExpression
func (v *WhereClauseVisitor) VisitExpression(ctx *grammar.ExpressionContext) any {
func (v *filterExpressionVisitor) VisitExpression(ctx *grammar.ExpressionContext) any {
return v.Visit(ctx.OrExpression())
}
// VisitOrExpression handles OR expressions
func (v *WhereClauseVisitor) VisitOrExpression(ctx *grammar.OrExpressionContext) any {
func (v *filterExpressionVisitor) VisitOrExpression(ctx *grammar.OrExpressionContext) any {
andExpressions := ctx.AllAndExpression()
andExpressionConditions := make([]string, len(andExpressions))
@ -206,7 +188,7 @@ func (v *WhereClauseVisitor) VisitOrExpression(ctx *grammar.OrExpressionContext)
}
// VisitAndExpression handles AND expressions
func (v *WhereClauseVisitor) VisitAndExpression(ctx *grammar.AndExpressionContext) any {
func (v *filterExpressionVisitor) VisitAndExpression(ctx *grammar.AndExpressionContext) any {
unaryExpressions := ctx.AllUnaryExpression()
unaryExpressionConditions := make([]string, len(unaryExpressions))
@ -222,7 +204,7 @@ func (v *WhereClauseVisitor) VisitAndExpression(ctx *grammar.AndExpressionContex
}
// VisitUnaryExpression handles NOT expressions
func (v *WhereClauseVisitor) VisitUnaryExpression(ctx *grammar.UnaryExpressionContext) any {
func (v *filterExpressionVisitor) VisitUnaryExpression(ctx *grammar.UnaryExpressionContext) any {
result := v.Visit(ctx.Primary()).(string)
// Check if this is a NOT expression
@ -234,7 +216,7 @@ func (v *WhereClauseVisitor) VisitUnaryExpression(ctx *grammar.UnaryExpressionCo
}
// VisitPrimary handles grouped expressions, comparisons, function calls, and full-text search
func (v *WhereClauseVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any {
func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any {
if ctx.OrExpression() != nil {
// This is a parenthesized expression
return fmt.Sprintf("(%s)", v.Visit(ctx.OrExpression()).(string))
@ -246,14 +228,43 @@ func (v *WhereClauseVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any {
return v.Visit(ctx.FullText())
}
// Handle standalone key as a full text search term
// Handle standalone key/value as a full text search term
if ctx.GetChildCount() == 1 {
if v.fullTextColumn == nil {
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"full text search is not supported",
))
return ""
}
child := ctx.GetChild(0)
if keyCtx, ok := child.(*grammar.KeyContext); ok {
// create a full text search condition on the body field
keyText := keyCtx.GetText()
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, keyText, v.builder)
if err != nil {
v.errors = append(v.errors, errors.WrapInternalf(err, errors.CodeInternal, "failed to build full text search condition"))
return ""
}
return cond
} else if valCtx, ok := child.(*grammar.ValueContext); ok {
var text string
if valCtx.QUOTED_TEXT() != nil {
text = trimQuotes(valCtx.QUOTED_TEXT().GetText())
} else if valCtx.NUMBER() != nil {
text = valCtx.NUMBER().GetText()
} else if valCtx.BOOL() != nil {
text = valCtx.BOOL().GetText()
} else if valCtx.KEY() != nil {
text = valCtx.KEY().GetText()
} else {
v.errors = append(v.errors, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "unsupported value type: %s", valCtx.GetText()))
return ""
}
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, text, v.builder)
if err != nil {
v.errors = append(v.errors, errors.WrapInternalf(err, errors.CodeInternal, "failed to build full text search condition"))
return ""
}
return cond
@ -264,9 +275,21 @@ func (v *WhereClauseVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any {
}
// VisitComparison handles all comparison operators
func (v *WhereClauseVisitor) VisitComparison(ctx *grammar.ComparisonContext) any {
func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext) any {
keys := v.Visit(ctx.Key()).([]*telemetrytypes.TelemetryFieldKey)
// this is used to skip resource filtering on the main table when
// the filter may instead be applied via the resources table sub-query
if v.skipResourceFilter {
filteredKeys := []*telemetrytypes.TelemetryFieldKey{}
for _, key := range keys {
if key.FieldContext != telemetrytypes.FieldContextResource {
filteredKeys = append(filteredKeys, key)
}
}
keys = filteredKeys
}
// Handle EXISTS specially
if ctx.EXISTS() != nil {
op := qbtypes.FilterOperatorExists
@ -281,12 +304,23 @@ func (v *WhereClauseVisitor) VisitComparison(ctx *grammar.ComparisonContext) any
}
conds = append(conds, condition)
}
// if there is only one condition, return it directly to avoid an extra `()` wrapper
if len(conds) == 1 {
return conds[0]
}
return v.builder.Or(conds...)
}
// Handle IN clause
if ctx.InClause() != nil || ctx.NotInClause() != nil {
values := v.Visit(ctx.InClause()).([]any)
var values []any
if ctx.InClause() != nil {
values = v.Visit(ctx.InClause()).([]any)
} else if ctx.NotInClause() != nil {
values = v.Visit(ctx.NotInClause()).([]any)
}
op := qbtypes.FilterOperatorIn
if ctx.NotInClause() != nil {
op = qbtypes.FilterOperatorNotIn
@ -299,6 +333,9 @@ func (v *WhereClauseVisitor) VisitComparison(ctx *grammar.ComparisonContext) any
}
conds = append(conds, condition)
}
if len(conds) == 1 {
return conds[0]
}
return v.builder.Or(conds...)
}
@ -325,6 +362,9 @@ func (v *WhereClauseVisitor) VisitComparison(ctx *grammar.ComparisonContext) any
}
conds = append(conds, condition)
}
if len(conds) == 1 {
return conds[0]
}
return v.builder.Or(conds...)
}
@ -351,29 +391,35 @@ func (v *WhereClauseVisitor) VisitComparison(ctx *grammar.ComparisonContext) any
} else if ctx.LIKE() != nil {
op = qbtypes.FilterOperatorLike
} else if ctx.ILIKE() != nil {
op = qbtypes.FilterOperatorLike
op = qbtypes.FilterOperatorILike
} else if ctx.NOT_LIKE() != nil {
op = qbtypes.FilterOperatorNotLike
} else if ctx.NOT_ILIKE() != nil {
op = qbtypes.FilterOperatorNotLike
op = qbtypes.FilterOperatorNotILike
} else if ctx.REGEXP() != nil {
op = qbtypes.FilterOperatorRegexp
} else if ctx.NOT() != nil && ctx.REGEXP() != nil {
op = qbtypes.FilterOperatorNotRegexp
if ctx.NOT() != nil {
op = qbtypes.FilterOperatorNotRegexp
}
} else if ctx.CONTAINS() != nil {
op = qbtypes.FilterOperatorContains
} else if ctx.NOT() != nil && ctx.CONTAINS() != nil {
op = qbtypes.FilterOperatorNotContains
if ctx.NOT() != nil {
op = qbtypes.FilterOperatorNotContains
}
}
var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, value, v.builder)
if err != nil {
v.errors = append(v.errors, errors.WrapInternalf(err, errors.CodeInternal, "failed to build condition"))
return ""
}
conds = append(conds, condition)
}
if len(conds) == 1 {
return conds[0]
}
return v.builder.Or(conds...)
}
@ -381,17 +427,17 @@ func (v *WhereClauseVisitor) VisitComparison(ctx *grammar.ComparisonContext) any
}
// VisitInClause handles IN expressions
func (v *WhereClauseVisitor) VisitInClause(ctx *grammar.InClauseContext) any {
func (v *filterExpressionVisitor) VisitInClause(ctx *grammar.InClauseContext) any {
return v.Visit(ctx.ValueList())
}
// VisitNotInClause handles NOT IN expressions
func (v *WhereClauseVisitor) VisitNotInClause(ctx *grammar.NotInClauseContext) any {
func (v *filterExpressionVisitor) VisitNotInClause(ctx *grammar.NotInClauseContext) any {
return v.Visit(ctx.ValueList())
}
// VisitValueList handles comma-separated value lists
func (v *WhereClauseVisitor) VisitValueList(ctx *grammar.ValueListContext) any {
func (v *filterExpressionVisitor) VisitValueList(ctx *grammar.ValueListContext) any {
values := ctx.AllValue()
parts := []any{}
@ -403,18 +449,34 @@ func (v *WhereClauseVisitor) VisitValueList(ctx *grammar.ValueListContext) any {
}
// VisitFullText handles standalone quoted strings for full-text search
func (v *WhereClauseVisitor) VisitFullText(ctx *grammar.FullTextContext) any {
// remove quotes from the quotedText
quotedText := strings.Trim(ctx.QUOTED_TEXT().GetText(), "\"'")
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, quotedText, v.builder)
func (v *filterExpressionVisitor) VisitFullText(ctx *grammar.FullTextContext) any {
var text string
if ctx.QUOTED_TEXT() != nil {
text = trimQuotes(ctx.QUOTED_TEXT().GetText())
} else if ctx.FREETEXT() != nil {
text = ctx.FREETEXT().GetText()
}
if v.fullTextColumn == nil {
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"full text search is not supported",
))
return ""
}
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, text, v.builder)
if err != nil {
v.errors = append(v.errors, errors.WrapInternalf(err, errors.CodeInternal, "failed to build full text search condition"))
return ""
}
return cond
}
// VisitFunctionCall handles function calls like has(), hasAny(), etc.
func (v *WhereClauseVisitor) VisitFunctionCall(ctx *grammar.FunctionCallContext) any {
func (v *filterExpressionVisitor) VisitFunctionCall(ctx *grammar.FunctionCallContext) any {
// Get function name based on which token is present
var functionName string
if ctx.HAS() != nil {
@ -460,10 +522,16 @@ func (v *WhereClauseVisitor) VisitFunctionCall(ctx *grammar.FunctionCallContext)
for _, key := range keys {
var fieldName string
if strings.HasPrefix(key.Name, telemetrylogs.BodyJSONStringSearchPrefix) {
fieldName, _ = telemetrylogs.GetBodyJSONKey(context.Background(), key, qbtypes.FilterOperatorUnknown, value)
if strings.HasPrefix(key.Name, v.jsonBodyPrefix) {
fieldName, _ = v.jsonKeyToKey(context.Background(), key, qbtypes.FilterOperatorUnknown, value)
} else {
fieldName, _ = v.fieldMapper.FieldFor(context.Background(), key)
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"function `%s` supports only body JSON search",
functionName,
))
return ""
}
var cond string
@ -479,11 +547,14 @@ func (v *WhereClauseVisitor) VisitFunctionCall(ctx *grammar.FunctionCallContext)
conds = append(conds, cond)
}
if len(conds) == 1 {
return conds[0]
}
return v.builder.Or(conds...)
}
// VisitFunctionParamList handles the parameter list for function calls
func (v *WhereClauseVisitor) VisitFunctionParamList(ctx *grammar.FunctionParamListContext) any {
func (v *filterExpressionVisitor) VisitFunctionParamList(ctx *grammar.FunctionParamListContext) any {
params := ctx.AllFunctionParam()
parts := make([]any, len(params))
@ -495,7 +566,7 @@ func (v *WhereClauseVisitor) VisitFunctionParamList(ctx *grammar.FunctionParamLi
}
// VisitFunctionParam handles individual parameters in function calls
func (v *WhereClauseVisitor) VisitFunctionParam(ctx *grammar.FunctionParamContext) any {
func (v *filterExpressionVisitor) VisitFunctionParam(ctx *grammar.FunctionParamContext) any {
if ctx.Key() != nil {
return v.Visit(ctx.Key())
} else if ctx.Value() != nil {
@ -508,16 +579,16 @@ func (v *WhereClauseVisitor) VisitFunctionParam(ctx *grammar.FunctionParamContex
}
// VisitArray handles array literals
func (v *WhereClauseVisitor) VisitArray(ctx *grammar.ArrayContext) any {
func (v *filterExpressionVisitor) VisitArray(ctx *grammar.ArrayContext) any {
return v.Visit(ctx.ValueList())
}
// VisitValue handles literal values: strings, numbers, booleans
func (v *WhereClauseVisitor) VisitValue(ctx *grammar.ValueContext) any {
func (v *filterExpressionVisitor) VisitValue(ctx *grammar.ValueContext) any {
if ctx.QUOTED_TEXT() != nil {
txt := ctx.QUOTED_TEXT().GetText()
// trim quotes and return the value
return strings.Trim(txt, "\"'")
return trimQuotes(txt)
} else if ctx.NUMBER() != nil {
number, err := strconv.ParseFloat(ctx.NUMBER().GetText(), 64)
if err != nil {
@ -546,11 +617,11 @@ func (v *WhereClauseVisitor) VisitValue(ctx *grammar.ValueContext) any {
}
// VisitKey handles field/column references
func (v *WhereClauseVisitor) VisitKey(ctx *grammar.KeyContext) any {
func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(ctx.KEY().GetText())
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(ctx.GetText())
keyName := strings.TrimPrefix(fieldKey.Name, telemetrylogs.BodyJSONStringSearchPrefix)
keyName := strings.TrimPrefix(fieldKey.Name, v.jsonBodyPrefix)
fieldKeysForName := v.fieldKeys[keyName]
@ -558,26 +629,33 @@ func (v *WhereClauseVisitor) VisitKey(ctx *grammar.KeyContext) any {
// if there is a field with the same name as an attribute/resource attribute.
// Since it will be ORed with fieldKeysForName, the result will not be empty
// when either of them has values
if strings.HasPrefix(fieldKey.Name, telemetrylogs.BodyJSONStringSearchPrefix) {
fieldKeysForName = append(fieldKeysForName, &fieldKey)
if strings.HasPrefix(fieldKey.Name, v.jsonBodyPrefix) && v.jsonBodyPrefix != "" {
if keyName != "" {
fieldKeysForName = append(fieldKeysForName, &fieldKey)
}
}
// TODO(srikanthccv): do we want to return an error here?
// should we infer the type and auto-magically build a key for expression?
if len(fieldKeysForName) == 0 {
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"key `%s` not found",
fieldKey.Name,
))
if strings.HasPrefix(fieldKey.Name, v.jsonBodyPrefix) && v.jsonBodyPrefix != "" && keyName == "" {
v.errors = append(v.errors, errors.NewInvalidInputf(
errors.CodeInvalidInput,
"missing key for body json search - expected key of the form `body.key` (ex: `body.status`)",
))
} else {
// TODO(srikanthccv): do we want to return an error here?
// should we infer the type and auto-magically build a key for expression?
v.errors = append(v.errors, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"key `%s` not found",
fieldKey.Name,
))
}
}
if len(fieldKeysForName) > 1 {
// this is a warning state; we must have an unambiguous key
v.warnings = append(v.warnings, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
v.warnings = append(v.warnings, fmt.Sprintf(
"key `%s` is ambiguous, found %d different combinations of field context and data type: %v",
fieldKey.Name,
len(fieldKeysForName),
@ -587,3 +665,13 @@ func (v *WhereClauseVisitor) VisitKey(ctx *grammar.KeyContext) any {
return fieldKeysForName
}
func trimQuotes(txt string) string {
if len(txt) >= 2 {
if (txt[0] == '"' && txt[len(txt)-1] == '"') ||
(txt[0] == '\'' && txt[len(txt)-1] == '\'') {
return txt[1 : len(txt)-1]
}
}
return txt
}
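
For reference, a sketch of driving PrepareWhereClause end to end with the logs mapper and condition builder (a hypothetical main; the expected SQL in the comment mirrors the body JSON filter tests at the end of this change).

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/telemetrylogs"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/huandu/go-sqlbuilder"
)

func main() {
	fm := telemetrylogs.NewFieldMapper()
	cb := telemetrylogs.NewConditionBuilder(fm)

	clause, warnings, err := querybuilder.PrepareWhereClause("body.status = 200", querybuilder.FilterExprVisitorOpts{
		FieldMapper:      fm,
		ConditionBuilder: cb,
		FieldKeys:        map[string][]*telemetrytypes.TelemetryFieldKey{},
		FullTextColumn:   &telemetrytypes.TelemetryFieldKey{Name: "body"},
		JsonBodyPrefix:   "body",
		JsonKeyToKey:     telemetrylogs.GetBodyJSONKey,
	})
	if err != nil {
		fmt.Println(err)
		return
	}
	sql, args := clause.BuildWithFlavor(sqlbuilder.ClickHouse)
	// Expected (per the tests below):
	// WHERE (JSONExtract(JSON_VALUE(body, '$."status"'), 'Float64') = ? AND JSON_EXISTS(body, '$."status"')) [200]
	fmt.Println(sql, args, warnings)
}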


@ -3,6 +3,7 @@ package telemetrylogs
import (
"context"
"fmt"
"slices"
"strings"
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
@ -20,7 +21,7 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {
return &conditionBuilder{fm: fm}
}
func (c *conditionBuilder) ConditionFor(
func (c *conditionBuilder) conditionFor(
ctx context.Context,
key *telemetrytypes.TelemetryFieldKey,
operator qbtypes.FilterOperator,
@ -44,6 +45,16 @@ func (c *conditionBuilder) ConditionFor(
tblFieldName, value = telemetrytypes.DataTypeCollisionHandledFieldName(key, value, tblFieldName)
// make use of the case-insensitive index for body
if tblFieldName == "body" {
switch operator {
case qbtypes.FilterOperatorLike:
return sb.ILike(tblFieldName, value), nil
case qbtypes.FilterOperatorNotLike:
return sb.NotILike(tblFieldName, value), nil
}
}
// regular operators
switch operator {
// regular operators
@ -76,11 +87,9 @@ func (c *conditionBuilder) ConditionFor(
return sb.NotILike(tblFieldName, fmt.Sprintf("%%%s%%", value)), nil
case qbtypes.FilterOperatorRegexp:
exp := fmt.Sprintf(`match(%s, %s)`, tblFieldName, sb.Var(value))
return sb.And(exp), nil
return fmt.Sprintf(`match(%s, %s)`, tblFieldName, sb.Var(value)), nil
case qbtypes.FilterOperatorNotRegexp:
exp := fmt.Sprintf(`not match(%s, %s)`, tblFieldName, sb.Var(value))
return sb.And(exp), nil
return fmt.Sprintf(`NOT match(%s, %s)`, tblFieldName, sb.Var(value)), nil
// between and not between
case qbtypes.FilterOperatorBetween:
values, ok := value.([]any)
@ -107,18 +116,37 @@ func (c *conditionBuilder) ConditionFor(
if !ok {
return "", qbtypes.ErrInValues
}
return sb.In(tblFieldName, values...), nil
// instead of using IN, we use `=` + `OR` to make use of the index
conditions := []string{}
for _, value := range values {
conditions = append(conditions, sb.E(tblFieldName, value))
}
return sb.Or(conditions...), nil
case qbtypes.FilterOperatorNotIn:
values, ok := value.([]any)
if !ok {
return "", qbtypes.ErrInValues
}
return sb.NotIn(tblFieldName, values...), nil
// instead of using NOT IN, we use `!=` + `AND` to make use of the index
conditions := []string{}
for _, value := range values {
conditions = append(conditions, sb.NE(tblFieldName, value))
}
return sb.And(conditions...), nil
// exists and not exists
// in the UI-based query builder, `exists` and `not exists` are used for
// key membership checks, so depending on the column type, the condition changes
case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
if strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
if operator == qbtypes.FilterOperatorExists {
return GetBodyJSONKeyForExists(ctx, key, operator, value), nil
} else {
return "NOT " + GetBodyJSONKeyForExists(ctx, key, operator, value), nil
}
}
var value any
switch column.Type {
case schema.ColumnTypeString, schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}:
@ -160,3 +188,32 @@ func (c *conditionBuilder) ConditionFor(
}
return "", fmt.Errorf("unsupported operator: %v", operator)
}
func (c *conditionBuilder) ConditionFor(
ctx context.Context,
key *telemetrytypes.TelemetryFieldKey,
operator qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
) (string, error) {
condition, err := c.conditionFor(ctx, key, operator, value, sb)
if err != nil {
return "", err
}
if operator.AddDefaultExistsFilter() {
// skip adding the exists filter for intrinsic fields,
// with an exception for body JSON search
field, _ := c.fm.FieldFor(ctx, key)
if slices.Contains(IntrinsicFields, field) && !strings.HasPrefix(key.Name, BodyJSONStringSearchPrefix) {
return condition, nil
}
existsCondition, err := c.conditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb)
if err != nil {
return "", err
}
return sb.And(condition, existsCondition), nil
}
return condition, nil
}


@ -20,6 +20,7 @@ func TestConditionFor(t *testing.T) {
operator qbtypes.FilterOperator
value any
expectedSQL string
expectedArgs []any
expectedError error
}{
{
@ -31,6 +32,7 @@ func TestConditionFor(t *testing.T) {
operator: qbtypes.FilterOperatorEqual,
value: "error message",
expectedSQL: "body = ?",
expectedArgs: []any{"error message"},
expectedError: nil,
},
{
@ -42,6 +44,7 @@ func TestConditionFor(t *testing.T) {
operator: qbtypes.FilterOperatorNotEqual,
value: uint64(1617979338000000000),
expectedSQL: "timestamp <> ?",
expectedArgs: []any{uint64(1617979338000000000)},
expectedError: nil,
},
{
@ -53,7 +56,8 @@ func TestConditionFor(t *testing.T) {
},
operator: qbtypes.FilterOperatorGreaterThan,
value: float64(100),
expectedSQL: "attributes_number['request.duration'] > ?",
expectedSQL: "(attributes_number['request.duration'] > ? AND mapContains(attributes_number, 'request.duration') = ?)",
expectedArgs: []any{float64(100), true},
expectedError: nil,
},
{
@ -65,7 +69,8 @@ func TestConditionFor(t *testing.T) {
},
operator: qbtypes.FilterOperatorLessThan,
value: float64(1024),
expectedSQL: "attributes_number['request.size'] < ?",
expectedSQL: "(attributes_number['request.size'] < ? AND mapContains(attributes_number, 'request.size') = ?)",
expectedArgs: []any{float64(1024), true},
expectedError: nil,
},
{
@ -77,6 +82,7 @@ func TestConditionFor(t *testing.T) {
operator: qbtypes.FilterOperatorGreaterThanOrEq,
value: uint64(1617979338000000000),
expectedSQL: "timestamp >= ?",
expectedArgs: []any{uint64(1617979338000000000)},
expectedError: nil,
},
{
@ -88,6 +94,7 @@ func TestConditionFor(t *testing.T) {
operator: qbtypes.FilterOperatorLessThanOrEq,
value: uint64(1617979338000000000),
expectedSQL: "timestamp <= ?",
expectedArgs: []any{uint64(1617979338000000000)},
expectedError: nil,
},
{
@ -98,7 +105,8 @@ func TestConditionFor(t *testing.T) {
},
operator: qbtypes.FilterOperatorLike,
value: "%error%",
expectedSQL: "body LIKE ?",
expectedSQL: "LOWER(body) LIKE LOWER(?)",
expectedArgs: []any{"%error%"},
expectedError: nil,
},
{
@ -109,7 +117,8 @@ func TestConditionFor(t *testing.T) {
},
operator: qbtypes.FilterOperatorNotLike,
value: "%error%",
expectedSQL: "body NOT LIKE ?",
expectedSQL: "LOWER(body) NOT LIKE LOWER(?)",
expectedArgs: []any{"%error%"},
expectedError: nil,
},
{
@ -121,7 +130,8 @@ func TestConditionFor(t *testing.T) {
},
operator: qbtypes.FilterOperatorILike,
value: "%admin%",
expectedSQL: "WHERE LOWER(attributes_string['user.id']) LIKE LOWER(?)",
expectedSQL: "(LOWER(attributes_string['user.id']) LIKE LOWER(?) AND mapContains(attributes_string, 'user.id') = ?)",
expectedArgs: []any{"%admin%", true},
expectedError: nil,
},
{
@ -134,6 +144,7 @@ func TestConditionFor(t *testing.T) {
operator: qbtypes.FilterOperatorNotILike,
value: "%admin%",
expectedSQL: "WHERE LOWER(attributes_string['user.id']) NOT LIKE LOWER(?)",
expectedArgs: []any{"%admin%"},
expectedError: nil,
},
{
@ -145,7 +156,8 @@ func TestConditionFor(t *testing.T) {
},
operator: qbtypes.FilterOperatorContains,
value: "admin",
expectedSQL: "WHERE LOWER(attributes_string['user.id']) LIKE LOWER(?)",
expectedSQL: "(LOWER(attributes_string['user.id']) LIKE LOWER(?) AND mapContains(attributes_string, 'user.id') = ?)",
expectedArgs: []any{"%admin%", true},
expectedError: nil,
},
{
@ -157,6 +169,7 @@ func TestConditionFor(t *testing.T) {
operator: qbtypes.FilterOperatorBetween,
value: []any{uint64(1617979338000000000), uint64(1617979348000000000)},
expectedSQL: "timestamp BETWEEN ? AND ?",
expectedArgs: []any{uint64(1617979338000000000), uint64(1617979348000000000)},
expectedError: nil,
},
{
@ -190,6 +203,7 @@ func TestConditionFor(t *testing.T) {
operator: qbtypes.FilterOperatorNotBetween,
value: []any{uint64(1617979338000000000), uint64(1617979348000000000)},
expectedSQL: "timestamp NOT BETWEEN ? AND ?",
expectedArgs: []any{uint64(1617979338000000000), uint64(1617979348000000000)},
expectedError: nil,
},
{
@ -200,7 +214,8 @@ func TestConditionFor(t *testing.T) {
},
operator: qbtypes.FilterOperatorIn,
value: []any{"error", "fatal", "critical"},
expectedSQL: "severity_text IN (?, ?, ?)",
expectedSQL: "(severity_text = ? OR severity_text = ? OR severity_text = ?)",
expectedArgs: []any{"error", "fatal", "critical"},
expectedError: nil,
},
{
@ -222,7 +237,8 @@ func TestConditionFor(t *testing.T) {
},
operator: qbtypes.FilterOperatorNotIn,
value: []any{"debug", "info", "trace"},
expectedSQL: "severity_text NOT IN (?, ?, ?)",
expectedSQL: "(severity_text <> ? AND severity_text <> ? AND severity_text <> ?)",
expectedArgs: []any{"debug", "info", "trace"},
expectedError: nil,
},
{
@ -234,6 +250,7 @@ func TestConditionFor(t *testing.T) {
operator: qbtypes.FilterOperatorExists,
value: nil,
expectedSQL: "body <> ?",
expectedArgs: []any{""},
expectedError: nil,
},
{
@ -245,6 +262,7 @@ func TestConditionFor(t *testing.T) {
operator: qbtypes.FilterOperatorNotExists,
value: nil,
expectedSQL: "body = ?",
expectedArgs: []any{""},
expectedError: nil,
},
{
@ -256,6 +274,7 @@ func TestConditionFor(t *testing.T) {
operator: qbtypes.FilterOperatorExists,
value: nil,
expectedSQL: "timestamp <> ?",
expectedArgs: []any{0},
expectedError: nil,
},
{
@ -268,6 +287,7 @@ func TestConditionFor(t *testing.T) {
operator: qbtypes.FilterOperatorExists,
value: nil,
expectedSQL: "mapContains(attributes_string, 'user.id') = ?",
expectedArgs: []any{true},
expectedError: nil,
},
{
@ -280,6 +300,7 @@ func TestConditionFor(t *testing.T) {
operator: qbtypes.FilterOperatorNotExists,
value: nil,
expectedSQL: "mapContains(attributes_string, 'user.id') <> ?",
expectedArgs: []any{true},
expectedError: nil,
},
{
@ -308,8 +329,9 @@ func TestConditionFor(t *testing.T) {
assert.Equal(t, tc.expectedError, err)
} else {
require.NoError(t, err)
sql, _ := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
sql, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
assert.Contains(t, sql, tc.expectedSQL)
assert.Equal(t, tc.expectedArgs, args)
}
})
}
@ -324,6 +346,7 @@ func TestConditionForMultipleKeys(t *testing.T) {
operator qbtypes.FilterOperator
value any
expectedSQL string
expectedArgs []any
expectedError error
}{
{
@ -341,6 +364,7 @@ func TestConditionForMultipleKeys(t *testing.T) {
operator: qbtypes.FilterOperatorEqual,
value: "error message",
expectedSQL: "body = ? AND severity_text = ?",
expectedArgs: []any{"error message", "error message"},
expectedError: nil,
},
}
@ -389,7 +413,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorEqual,
value: 200,
expectedSQL: "JSONExtract(JSON_VALUE(body, '$.http.status_code'), 'Int64') = ?",
expectedSQL: `JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'Int64') = ?`,
expectedError: nil,
},
{
@ -399,7 +423,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorEqual,
value: 405.5,
expectedSQL: "JSONExtract(JSON_VALUE(body, '$.duration_ms'), 'Float64') = ?",
expectedSQL: `JSONExtract(JSON_VALUE(body, '$."duration_ms"'), 'Float64') = ?`,
expectedError: nil,
},
{
@ -409,7 +433,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorEqual,
value: "GET",
expectedSQL: "JSONExtract(JSON_VALUE(body, '$.http.method'), 'String') = ?",
expectedSQL: `JSONExtract(JSON_VALUE(body, '$."http"."method"'), 'String') = ?`,
expectedError: nil,
},
{
@ -419,7 +443,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorEqual,
value: true,
expectedSQL: "JSONExtract(JSON_VALUE(body, '$.http.success'), 'Bool') = ?",
expectedSQL: `JSONExtract(JSON_VALUE(body, '$."http"."success"'), 'Bool') = ?`,
expectedError: nil,
},
{
@ -429,7 +453,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorExists,
value: nil,
expectedSQL: "JSONExtract(JSON_VALUE(body, '$.http.status_code'), 'String') <> ?",
expectedSQL: `JSON_EXISTS(body, '$."http"."status_code"')`,
expectedError: nil,
},
{
@ -439,7 +463,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorNotExists,
value: nil,
expectedSQL: "JSONExtract(JSON_VALUE(body, '$.http.status_code'), 'String') = ?",
expectedSQL: `NOT JSON_EXISTS(body, '$."http"."status_code"')`,
expectedError: nil,
},
{
@ -449,7 +473,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorGreaterThan,
value: "200",
expectedSQL: "JSONExtract(JSON_VALUE(body, '$.http.status_code'), 'Int64') > ?",
expectedSQL: `JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'Int64') > ?`,
expectedError: nil,
},
{
@ -459,7 +483,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorGreaterThan,
value: 200,
expectedSQL: "JSONExtract(JSON_VALUE(body, '$.http.status_code'), 'Int64') > ?",
expectedSQL: `JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'Int64') > ?`,
expectedError: nil,
},
{
@ -469,7 +493,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorLessThan,
value: "300",
expectedSQL: "JSONExtract(JSON_VALUE(body, '$.http.status_code'), 'Int64') < ?",
expectedSQL: `JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'Int64') < ?`,
expectedError: nil,
},
{
@ -479,7 +503,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorLessThan,
value: 300,
expectedSQL: "JSONExtract(JSON_VALUE(body, '$.http.status_code'), 'Int64') < ?",
expectedSQL: `JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'Int64') < ?`,
expectedError: nil,
},
{
@ -489,7 +513,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorContains,
value: "200",
expectedSQL: "LOWER(JSONExtract(JSON_VALUE(body, '$.http.status_code'), 'String')) LIKE LOWER(?)",
expectedSQL: `LOWER(JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'String')) LIKE LOWER(?)`,
expectedError: nil,
},
{
@ -499,7 +523,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorNotContains,
value: "200",
expectedSQL: "LOWER(JSONExtract(JSON_VALUE(body, '$.http.status_code'), 'String')) NOT LIKE LOWER(?)",
expectedSQL: `LOWER(JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'String')) NOT LIKE LOWER(?)`,
expectedError: nil,
},
{
@ -509,7 +533,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorBetween,
value: []any{"200", "300"},
expectedSQL: "JSONExtract(JSON_VALUE(body, '$.http.status_code'), 'Int64') BETWEEN ? AND ?",
expectedSQL: `JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'Int64') BETWEEN ? AND ?`,
expectedError: nil,
},
{
@ -519,7 +543,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorBetween,
value: []any{400, 500},
expectedSQL: "JSONExtract(JSON_VALUE(body, '$.http.status_code'), 'Int64') BETWEEN ? AND ?",
expectedSQL: `JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'Int64') BETWEEN ? AND ?`,
expectedError: nil,
},
{
@ -529,7 +553,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorIn,
value: []any{"200", "300"},
expectedSQL: "JSONExtract(JSON_VALUE(body, '$.http.status_code'), 'Int64') IN (?, ?)",
expectedSQL: `(JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'Int64') = ? OR JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'Int64') = ?)`,
expectedError: nil,
},
{
@ -539,7 +563,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
},
operator: qbtypes.FilterOperatorIn,
value: []any{401, 404, 500},
expectedSQL: "JSONExtract(JSON_VALUE(body, '$.http.status_code'), 'Int64') IN (?, ?, ?)",
expectedSQL: `(JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'Int64') = ? OR JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'Int64') = ? OR JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'Int64') = ?)`,
expectedError: nil,
},
}


@ -2,4 +2,5 @@ package telemetrylogs
var (
BodyJSONStringSearchPrefix = `body.`
IntrinsicFields = []string{"timestamp", "body", "trace_id", "span_id", "trace_flags", "severity_text", "severity_number"}
)


@ -68,22 +68,36 @@ func inferDataType(value any, operator qbtypes.FilterOperator, key *telemetrytyp
return valueType, value
}
func getBodyJSONPath(key *telemetrytypes.TelemetryFieldKey) string {
parts := strings.Split(key.Name, ".")[1:]
newParts := []string{}
for _, part := range parts {
if strings.HasSuffix(part, "[*]") {
newParts = append(newParts, fmt.Sprintf(`"%s"[*]`, strings.TrimSuffix(part, "[*]")))
} else {
newParts = append(newParts, fmt.Sprintf(`"%s"`, part))
}
}
return strings.Join(newParts, ".")
}
func GetBodyJSONKey(_ context.Context, key *telemetrytypes.TelemetryFieldKey, operator qbtypes.FilterOperator, value any) (string, any) {
dataType, value := inferDataType(value, operator, key)
// all body JSON keys are of the form body.<path>
path := strings.Join(strings.Split(key.Name, ".")[1:], ".")
// for array types, we need to extract the value from the JSON_QUERY
if dataType == telemetrytypes.FieldDataTypeArrayInt64 ||
dataType == telemetrytypes.FieldDataTypeArrayFloat64 ||
dataType == telemetrytypes.FieldDataTypeArrayString ||
dataType == telemetrytypes.FieldDataTypeArrayBool ||
dataType == telemetrytypes.FieldDataTypeArrayNumber {
return fmt.Sprintf("JSONExtract(JSON_QUERY(body, '$.%s'), '%s')", path, dataType.CHDataType()), value
return fmt.Sprintf("JSONExtract(JSON_QUERY(body, '$.%s'), '%s')", getBodyJSONPath(key), dataType.CHDataType()), value
}
// for all other types, we need to extract the value from the JSON_VALUE
return fmt.Sprintf("JSONExtract(JSON_VALUE(body, '$.%s'), '%s')", path, dataType.CHDataType()), value
return fmt.Sprintf("JSONExtract(JSON_VALUE(body, '$.%s'), '%s')", getBodyJSONPath(key), dataType.CHDataType()), value
}
func GetBodyJSONKeyForExists(_ context.Context, key *telemetrytypes.TelemetryFieldKey, _ qbtypes.FilterOperator, _ any) string {
return fmt.Sprintf("JSON_EXISTS(body, '$.%s')", getBodyJSONPath(key))
}
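
A small sketch of the resulting path quoting (a hypothetical main; the expected strings in the comments mirror the JSON body condition tests in this change).

package main

import (
	"context"
	"fmt"

	"github.com/SigNoz/signoz/pkg/telemetrylogs"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

func main() {
	key := &telemetrytypes.TelemetryFieldKey{Name: "body.http.status_code"}

	// Scalar lookup: JSONExtract(JSON_VALUE(body, '$."http"."status_code"'), 'Int64')
	expr, val := telemetrylogs.GetBodyJSONKey(context.Background(), key, qbtypes.FilterOperatorEqual, 200)
	fmt.Println(expr, val)

	// Existence check: JSON_EXISTS(body, '$."http"."status_code"')
	fmt.Println(telemetrylogs.GetBodyJSONKeyForExists(context.Background(), key, qbtypes.FilterOperatorExists, nil))
}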


@ -76,7 +76,7 @@ func (c *conditionBuilder) ConditionFor(
case qbtypes.FilterOperatorRegexp:
cond = fmt.Sprintf(`match(%s, %s)`, tblFieldName, sb.Var(value))
case qbtypes.FilterOperatorNotRegexp:
cond = fmt.Sprintf(`not match(%s, %s)`, tblFieldName, sb.Var(value))
cond = fmt.Sprintf(`NOT match(%s, %s)`, tblFieldName, sb.Var(value))
// in and not in
case qbtypes.FilterOperatorIn:
@ -84,13 +84,23 @@ func (c *conditionBuilder) ConditionFor(
if !ok {
return "", qbtypes.ErrInValues
}
cond = sb.In(tblFieldName, values...)
// instead of using IN, we use `=` + `OR` to make use of the index
conditions := []string{}
for _, value := range values {
conditions = append(conditions, sb.E(tblFieldName, value))
}
cond = sb.Or(conditions...)
case qbtypes.FilterOperatorNotIn:
values, ok := value.([]any)
if !ok {
return "", qbtypes.ErrInValues
}
cond = sb.NotIn(tblFieldName, values...)
// instead of using NOT IN, we use `!=` + `AND` to make use of the index
conditions := []string{}
for _, value := range values {
conditions = append(conditions, sb.NE(tblFieldName, value))
}
cond = sb.And(conditions...)
// exists and not exists
// in the query builder, `exists` and `not exists` are used for


@ -0,0 +1,98 @@
package telemetrytests
import (
"testing"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrytraces"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/stretchr/testify/require"
)
// TestAggRewrite tests rewriting a set of aggregation expressions
func TestAggRewrite(t *testing.T) {
fm := telemetrytraces.NewFieldMapper()
cb := telemetrytraces.NewConditionBuilder(fm)
// Define a comprehensive set of field keys to support all test cases
keys := buildCompleteFieldKeyMap()
opts := querybuilder.AggExprRewriterOptions{
FieldMapper: fm,
ConditionBuilder: cb,
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{
Name: "body",
},
JsonBodyPrefix: "body",
JsonKeyToKey: telemetrylogs.GetBodyJSONKey,
RateInterval: 60,
}
testCases := []struct {
expr string
shouldPass bool
expectedExpr string
expectedArgs []any
expectedErrorContains string
}{
{
expr: "count()",
shouldPass: true,
expectedExpr: "count()",
},
{
expr: `countIf(service.name = "redis")`,
shouldPass: true,
expectedExpr: "countIf((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))",
expectedArgs: []any{"redis", true},
},
{
expr: `countIf(service.name = "redis" AND status = 200)`,
shouldPass: true,
expectedExpr: "countIf(((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) AND (attributes_number['status'] = ? AND mapContains(attributes_number, 'status') = ?)))",
expectedArgs: []any{"redis", true, float64(200), true},
},
{
expr: `p05(duration_nano)`,
shouldPass: true,
expectedExpr: "quantile(0.05)(duration_nano)",
},
{
expr: `rate()`,
shouldPass: true,
expectedExpr: "count()/60",
},
{
expr: `avg(duration_nano)`,
shouldPass: true,
expectedExpr: "avg(duration_nano)",
},
{
expr: `sum(total_orders)`,
shouldPass: true,
expectedExpr: "sum(attributes_number['total_orders'])",
},
}
rewriter := querybuilder.NewAggExprRewriter(opts)
for _, tc := range testCases {
t.Run(limitString(tc.expr, 50), func(t *testing.T) {
expr, args, err := rewriter.Rewrite(tc.expr)
if tc.shouldPass {
if err != nil {
t.Errorf("Failed to parse query: %s\nError: %v\n", tc.expr, err)
return
}
// Compare the rewritten expression and its arguments with the expected values
require.Equal(t, tc.expectedExpr, expr)
require.Equal(t, tc.expectedArgs, args)
} else {
require.Error(t, err, "Expected error for query: %s", tc.expr)
require.Contains(t, err.Error(), tc.expectedErrorContains)
}
})
}
}
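The p05 case above relies on the rewriter mapping percentile-style aliases to ClickHouse quantile() calls. The rewriter internals are not part of this diff; the following is a hypothetical sketch of such a mapping, and mapPercentileAlias is an invented name used only for illustration.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// mapPercentileAlias converts aliases like p05, p50, p99 into the corresponding
// ClickHouse quantile() function name. Hypothetical sketch, not the actual code.
func mapPercentileAlias(fn string) (string, bool) {
	if !strings.HasPrefix(fn, "p") || len(fn) < 2 {
		return "", false
	}
	n, err := strconv.Atoi(fn[1:])
	if err != nil || n < 0 || n > 100 {
		return "", false
	}
	return fmt.Sprintf("quantile(%g)", float64(n)/100), true
}

func main() {
	for _, alias := range []string{"p05", "p50", "p99"} {
		name, _ := mapPercentileAlias(alias)
		fmt.Printf("%s -> %s\n", alias, name) // p05 -> quantile(0.05), p50 -> quantile(0.5), p99 -> quantile(0.99)
	}
}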

View File

@ -0,0 +1,190 @@
package telemetrytests
import (
"fmt"
"testing"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
"github.com/stretchr/testify/require"
)
// TestFilterExprLogsBodyJSON tests a comprehensive set of query patterns for body JSON search
func TestFilterExprLogsBodyJSON(t *testing.T) {
fm := telemetrylogs.NewFieldMapper()
cb := telemetrylogs.NewConditionBuilder(fm)
// Define a comprehensive set of field keys to support all test cases
keys := buildCompleteFieldKeyMap()
opts := querybuilder.FilterExprVisitorOpts{
FieldMapper: fm,
ConditionBuilder: cb,
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{
Name: "body",
},
JsonBodyPrefix: "body",
JsonKeyToKey: telemetrylogs.GetBodyJSONKey,
}
testCases := []struct {
category string
query string
shouldPass bool
expectedQuery string
expectedArgs []any
expectedErrorContains string
}{
{
category: "json",
query: "has(body.requestor_list[*], 'index_service')",
shouldPass: true,
expectedQuery: `WHERE has(JSONExtract(JSON_QUERY(body, '$."requestor_list"[*]'), 'Array(String)'), ?)`,
expectedArgs: []any{"index_service"},
expectedErrorContains: "",
},
{
category: "json",
query: "has(body.int_numbers[*], 2)",
shouldPass: true,
expectedQuery: `WHERE has(JSONExtract(JSON_QUERY(body, '$."int_numbers"[*]'), 'Array(Float64)'), ?)`,
expectedArgs: []any{float64(2)},
expectedErrorContains: "",
},
{
category: "json",
query: "has(body.bool[*], true)",
shouldPass: true,
expectedQuery: `WHERE has(JSONExtract(JSON_QUERY(body, '$."bool"[*]'), 'Array(Bool)'), ?)`,
expectedArgs: []any{true},
expectedErrorContains: "",
},
{
category: "json",
query: "NOT has(body.nested_num[*].float_nums[*], 2.2)",
shouldPass: true,
expectedQuery: `WHERE NOT (has(JSONExtract(JSON_QUERY(body, '$."nested_num"[*]."float_nums"[*]'), 'Array(Float64)'), ?))`,
expectedArgs: []any{float64(2.2)},
expectedErrorContains: "",
},
{
category: "json",
query: "body.message = hello",
shouldPass: true,
expectedQuery: `WHERE (JSONExtract(JSON_VALUE(body, '$."message"'), 'String') = ? AND JSON_EXISTS(body, '$."message"'))`,
expectedArgs: []any{"hello"},
expectedErrorContains: "",
},
{
category: "json",
query: "body.status = 1",
shouldPass: true,
expectedQuery: `WHERE (JSONExtract(JSON_VALUE(body, '$."status"'), 'Float64') = ? AND JSON_EXISTS(body, '$."status"'))`,
expectedArgs: []any{float64(1)},
expectedErrorContains: "",
},
{
category: "json",
query: "body.status = 1.1",
shouldPass: true,
expectedQuery: `WHERE (JSONExtract(JSON_VALUE(body, '$."status"'), 'Float64') = ? AND JSON_EXISTS(body, '$."status"'))`,
expectedArgs: []any{float64(1.1)},
expectedErrorContains: "",
},
{
category: "json",
query: "body.boolkey = true",
shouldPass: true,
expectedQuery: `WHERE (JSONExtract(JSON_VALUE(body, '$."boolkey"'), 'Bool') = ? AND JSON_EXISTS(body, '$."boolkey"'))`,
expectedArgs: []any{true},
expectedErrorContains: "",
},
{
category: "json",
query: "body.status > 200",
shouldPass: true,
expectedQuery: `WHERE (JSONExtract(JSON_VALUE(body, '$."status"'), 'Float64') > ? AND JSON_EXISTS(body, '$."status"'))`,
expectedArgs: []any{float64(200)},
expectedErrorContains: "",
},
{
category: "json",
query: "body.message REGEXP 'a*'",
shouldPass: true,
expectedQuery: `WHERE (match(JSONExtract(JSON_VALUE(body, '$."message"'), 'String'), ?) AND JSON_EXISTS(body, '$."message"'))`,
expectedArgs: []any{"a*"},
expectedErrorContains: "",
},
{
category: "json",
query: `body.message CONTAINS "hello 'world'"`,
shouldPass: true,
expectedQuery: `WHERE (LOWER(JSONExtract(JSON_VALUE(body, '$."message"'), 'String')) LIKE LOWER(?) AND JSON_EXISTS(body, '$."message"'))`,
expectedArgs: []any{"%hello 'world'%"},
expectedErrorContains: "",
},
{
category: "json",
query: `body.message EXISTS`,
shouldPass: true,
expectedQuery: `WHERE JSON_EXISTS(body, '$."message"')`,
expectedErrorContains: "",
},
{
category: "json",
query: `body.name IN ('hello', 'world')`,
shouldPass: true,
expectedQuery: `WHERE ((JSONExtract(JSON_VALUE(body, '$."name"'), 'String') = ? OR JSONExtract(JSON_VALUE(body, '$."name"'), 'String') = ?) AND JSON_EXISTS(body, '$."name"'))`,
expectedArgs: []any{"hello", "world"},
expectedErrorContains: "",
},
{
category: "json",
query: `body.value IN (200, 300)`,
shouldPass: true,
expectedQuery: `WHERE ((JSONExtract(JSON_VALUE(body, '$."value"'), 'Float64') = ? OR JSONExtract(JSON_VALUE(body, '$."value"'), 'Float64') = ?) AND JSON_EXISTS(body, '$."value"'))`,
expectedArgs: []any{float64(200), float64(300)},
expectedErrorContains: "",
},
{
category: "json",
query: "body.key-with-hyphen = true",
shouldPass: true,
expectedQuery: `WHERE (JSONExtract(JSON_VALUE(body, '$."key-with-hyphen"'), 'Bool') = ? AND JSON_EXISTS(body, '$."key-with-hyphen"'))`,
expectedArgs: []any{true},
expectedErrorContains: "",
},
}
for _, tc := range testCases {
t.Run(fmt.Sprintf("%s: %s", tc.category, limitString(tc.query, 50)), func(t *testing.T) {
clause, _, err := querybuilder.PrepareWhereClause(tc.query, opts)
if tc.shouldPass {
if err != nil {
t.Errorf("Failed to parse query: %s\nError: %v\n", tc.query, err)
return
}
if clause == nil {
t.Errorf("Expected clause for query: %s\n", tc.query)
return
}
// Build the SQL and compare it with the expected query and args
sql, args := clause.BuildWithFlavor(sqlbuilder.ClickHouse)
require.Equal(t, tc.expectedQuery, sql)
require.Equal(t, tc.expectedArgs, args)
} else {
require.Error(t, err, "Expected error for query: %s", tc.query)
require.Contains(t, err.Error(), tc.expectedErrorContains)
}
})
}
}
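Outside of the tests, the same options turn a filter expression into a parameterized WHERE clause; a minimal sketch follows. FieldKeys is left empty on the assumption that body.* paths need no key metadata (real callers pass the full map as the test does), and go-sqlbuilder's Flavor.Interpolate is used here only to inline the arguments for inspection.

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/telemetrylogs"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/huandu/go-sqlbuilder"
)

func main() {
	fm := telemetrylogs.NewFieldMapper()
	cb := telemetrylogs.NewConditionBuilder(fm)
	opts := querybuilder.FilterExprVisitorOpts{
		FieldMapper:      fm,
		ConditionBuilder: cb,
		FieldKeys:        map[string][]*telemetrytypes.TelemetryFieldKey{}, // assumption: body.* paths need no key metadata
		FullTextColumn:   &telemetrytypes.TelemetryFieldKey{Name: "body"},
		JsonBodyPrefix:   "body",
		JsonKeyToKey:     telemetrylogs.GetBodyJSONKey,
	}

	clause, _, err := querybuilder.PrepareWhereClause(`body.status > 200`, opts)
	if err != nil {
		panic(err)
	}
	sql, args := clause.BuildWithFlavor(sqlbuilder.ClickHouse)

	// Inline args purely for inspection; production code keeps them bound.
	debugSQL, err := sqlbuilder.ClickHouse.Interpolate(sql, args)
	if err != nil {
		panic(err)
	}
	fmt.Println(debugSQL)
}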

File diff suppressed because it is too large

View File

@ -0,0 +1,859 @@
package telemetrytests
import (
"strings"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
// limitString flattens whitespace and truncates s to maxLen characters for display in test names
func limitString(s string, maxLen int) string {
s = strings.ReplaceAll(s, "\n", " ")
s = strings.ReplaceAll(s, "\t", " ")
if len(s) <= maxLen {
return s
}
return s[:maxLen-3] + "..."
}
// buildCompleteFieldKeyMap builds a field key map covering all test scenarios
func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
return map[string][]*telemetrytypes.TelemetryFieldKey{
"service.name": {
{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"http.status_code": {
{
Name: "http.status_code",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
"status": {
{
Name: "status",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
"code": {
{
Name: "code",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
"count": {
{
Name: "count",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
"duration": {
{
Name: "duration",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
},
"message": {
{
Name: "message",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"path": {
{
Name: "path",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"email": {
{
Name: "email",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"filename": {
{
Name: "filename",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"amount": {
{
Name: "amount",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
},
"error.code": {
{
Name: "error.code",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
"environment": {
{
Name: "environment",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"user.id": {
{
Name: "user.id",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"metadata.version": {
{
Name: "metadata.version",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"request.headers.authorization": {
{
Name: "request.headers.authorization",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"response.body.data": {
{
Name: "response.body.data",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"version": {
{
Name: "version",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"response.headers": {
{
Name: "response.headers",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"request.query_params": {
{
Name: "request.query_params",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"level": {
{
Name: "level",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"user.status": {
{
Name: "user.status",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"isEnabled": {
{
Name: "isEnabled",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
},
"isDisabled": {
{
Name: "isDisabled",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
},
"is_valid": {
{
Name: "is_valid",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
},
"is_invalid": {
{
Name: "is_invalid",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
},
"key": {
{
Name: "key",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"status.code": {
{
Name: "status.code",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
// Special fields for tests
"value": {
{
Name: "value",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"threshold": {
{
Name: "threshold",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
},
"warning_threshold": {
{
Name: "warning_threshold",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
},
"critical_threshold": {
{
Name: "critical_threshold",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
},
"type": {
{
Name: "type",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"age": {
{
Name: "age",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
"user.email": {
{
Name: "user.email",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"user.name": {
{
Name: "user.name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"user.profile.name": {
{
Name: "user.profile.name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"region": {
{
Name: "region",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"service.type": {
{
Name: "service.type",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"service.deprecated": {
{
Name: "service.deprecated",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
},
"is_automated_test": {
{
Name: "is_automated_test",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
},
"severity": {
{
Name: "severity",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"created_at": {
{
Name: "created_at",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
},
"is_deleted": {
{
Name: "is_deleted",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
},
"customer.type": {
{
Name: "customer.type",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"total_orders": {
{
Name: "total_orders",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
"total_spent": {
{
Name: "total_spent",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
},
"items[].product.category": {
{
Name: "items[].product.category",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"items[].license_type": {
{
Name: "items[].license_type",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"first_name": {
{
Name: "first_name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"last_name": {
{
Name: "last_name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"address.country": {
{
Name: "address.country",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"address.state": {
{
Name: "address.state",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"address.city": {
{
Name: "address.city",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"subscription.plan": {
{
Name: "subscription.plan",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"subscription.status": {
{
Name: "subscription.status",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"is_expected": {
{
Name: "is_expected",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
},
"http.method": {
{
Name: "http.method",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"http.path": {
{
Name: "http.path",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"http.status": {
{
Name: "http.status",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
"response.body.error": {
{
Name: "response.body.error",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"metric.name": {
{
Name: "metric.name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"metric.value": {
{
Name: "metric.value",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
},
"metric.rate_of_change": {
{
Name: "metric.rate_of_change",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
},
"resource.type": {
{
Name: "resource.type",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"resource.environment": {
{
Name: "resource.environment",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"resource.is_critical": {
{
Name: "resource.is_critical",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
},
"metric.is_monitored": {
{
Name: "metric.is_monitored",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
},
"aggregation.window": {
{
Name: "aggregation.window",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"aggregation.function": {
{
Name: "aggregation.function",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"action": {
{
Name: "action",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"host": {
{
Name: "host",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"metric.type": {
{
Name: "metric.type",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"rate": {
{
Name: "rate",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
},
"delta": {
{
Name: "delta",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
},
"alerting": {
{
Name: "alerting",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
},
"aggregation": {
{
Name: "aggregation",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"user.created_at": {
{
Name: "user.created_at",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
},
"user.type": {
{
Name: "user.type",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"user.last_login": {
{
Name: "user.last_login",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
},
"user.failed_logins": {
{
Name: "user.failed_logins",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
"user.department": {
{
Name: "user.department",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"user.region": {
{
Name: "user.region",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"user.quota": {
{
Name: "user.quota",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
"users[].role": {
{
Name: "users[].role",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"users[].status": {
{
Name: "users[].status",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"orders[].items[].product.id": {
{
Name: "orders[].items[].product.id",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
"data.metrics[].value": {
{
Name: "data.metrics[].value",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
},
},
"data.metrics[].name": {
{
Name: "data.metrics[].name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"requests[].response.status": {
{
Name: "requests[].response.status",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
// Unicode characters in keys
"école.name": {
{
Name: "école.name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"straße.name": {
{
Name: "straße.name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"日本語.text": {
{
Name: "日本語.text",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"россия.capital": {
{
Name: "россия.capital",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
// Special characters in keys
"special-key": {
{
Name: "special-key",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"special.key": {
{
Name: "special.key",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"special_key": {
{
Name: "special_key",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"special:key": {
{
Name: "special:key",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"key-with-dashes": {
{
Name: "key-with-dashes",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"key_with_underscore": {
{
Name: "key_with_underscore",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
// Nested paths with dots
"and.value": {
{
Name: "and.value",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
},
"or.status": {
{
Name: "or.status",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
},
"not.enabled": {
{
Name: "not.enabled",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
},
"like.pattern": {
{
Name: "like.pattern",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"between.min": {
{
Name: "between.min",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
"between.max": {
{
Name: "between.max",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
"exists.flag": {
{
Name: "exists.flag",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeBool,
},
},
"contains.text": {
{
Name: "contains.text",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"items[0].name": {
{
Name: "items[0].name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"errors[].code": {
{
Name: "errors[].code",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
"metadata.dimensions": {
{
Name: "metadata.dimensions",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"metadata.dimensions.width": {
{
Name: "metadata.dimensions.width",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
"subscription_type": {
{
Name: "subscription_type",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
// Resource-related fields from original example
"resource.k8s.namespace.name": {
{
Name: "resource.k8s.namespace.name",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
// Add location field for Unicode test
"location": {
{
Name: "location",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
// Add description field for Unicode test
"description": {
{
Name: "description",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
// Add query field for special characters test
"query": {
{
Name: "query",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"materialized.key.name": {
{
Name: "materialized.key.name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
},
}
}

View File

@ -3,6 +3,7 @@ package telemetrytraces
import (
"context"
"fmt"
"slices"
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
@ -20,7 +21,7 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {
return &conditionBuilder{fm: fm}
}
func (c *conditionBuilder) ConditionFor(
func (c *conditionBuilder) conditionFor(
ctx context.Context,
key *telemetrytypes.TelemetryFieldKey,
operator qbtypes.FilterOperator,
@ -73,11 +74,9 @@ func (c *conditionBuilder) ConditionFor(
return sb.NotILike(tblFieldName, fmt.Sprintf("%%%s%%", value)), nil
case qbtypes.FilterOperatorRegexp:
exp := fmt.Sprintf(`match(%s, %s)`, tblFieldName, sb.Var(value))
return sb.And(exp), nil
return fmt.Sprintf(`match(%s, %s)`, tblFieldName, sb.Var(value)), nil
case qbtypes.FilterOperatorNotRegexp:
exp := fmt.Sprintf(`not match(%s, %s)`, tblFieldName, sb.Var(value))
return sb.And(exp), nil
return fmt.Sprintf(`NOT match(%s, %s)`, tblFieldName, sb.Var(value)), nil
// between and not between
case qbtypes.FilterOperatorBetween:
@ -105,13 +104,23 @@ func (c *conditionBuilder) ConditionFor(
if !ok {
return "", qbtypes.ErrInValues
}
return sb.In(tblFieldName, values...), nil
// instead of using IN, we use `=` + `OR` to make use of the index
conditions := []string{}
for _, value := range values {
conditions = append(conditions, sb.E(tblFieldName, value))
}
return sb.Or(conditions...), nil
case qbtypes.FilterOperatorNotIn:
values, ok := value.([]any)
if !ok {
return "", qbtypes.ErrInValues
}
return sb.NotIn(tblFieldName, values...), nil
// instead of using NOT IN, we use `!=` + `AND` to make use of the index
conditions := []string{}
for _, value := range values {
conditions = append(conditions, sb.NE(tblFieldName, value))
}
return sb.And(conditions...), nil
// exists and not exists
// in the query builder, `exists` and `not exists` are used for
@ -166,3 +175,31 @@ func (c *conditionBuilder) ConditionFor(
}
return "", nil
}
func (c *conditionBuilder) ConditionFor(
ctx context.Context,
key *telemetrytypes.TelemetryFieldKey,
operator qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
) (string, error) {
condition, err := c.conditionFor(ctx, key, operator, value, sb)
if err != nil {
return "", err
}
if operator.AddDefaultExistsFilter() {
// skip adding exists filter for intrinsic fields
field, _ := c.fm.FieldFor(ctx, key)
if slices.Contains(IntrinsicFields, field) || slices.Contains(CalculatedFields, field) {
return condition, nil
}
existsCondition, err := c.conditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb)
if err != nil {
return "", err
}
return sb.And(condition, existsCondition), nil
}
return condition, nil
}

View File

@ -20,6 +20,7 @@ func TestConditionFor(t *testing.T) {
operator qbtypes.FilterOperator
value any
expectedSQL string
expectedArgs []any
expectedError error
}{
{
@ -31,6 +32,7 @@ func TestConditionFor(t *testing.T) {
operator: qbtypes.FilterOperatorNotEqual,
value: uint64(1617979338000000000),
expectedSQL: "timestamp <> ?",
expectedArgs: []any{uint64(1617979338000000000)},
expectedError: nil,
},
{
@ -42,7 +44,8 @@ func TestConditionFor(t *testing.T) {
},
operator: qbtypes.FilterOperatorGreaterThan,
value: float64(100),
expectedSQL: "attributes_number['request.duration'] > ?",
expectedSQL: "(attributes_number['request.duration'] > ? AND mapContains(attributes_number, 'request.duration') = ?)",
expectedArgs: []any{float64(100), true},
expectedError: nil,
},
{
@ -54,7 +57,8 @@ func TestConditionFor(t *testing.T) {
},
operator: qbtypes.FilterOperatorLessThan,
value: float64(1024),
expectedSQL: "attributes_number['request.size'] < ?",
expectedSQL: "(attributes_number['request.size'] < ? AND mapContains(attributes_number, 'request.size') = ?)",
expectedArgs: []any{float64(1024), true},
expectedError: nil,
},
{
@ -66,6 +70,7 @@ func TestConditionFor(t *testing.T) {
operator: qbtypes.FilterOperatorGreaterThanOrEq,
value: uint64(1617979338000000000),
expectedSQL: "timestamp >= ?",
expectedArgs: []any{uint64(1617979338000000000)},
expectedError: nil,
},
{
@ -77,6 +82,7 @@ func TestConditionFor(t *testing.T) {
operator: qbtypes.FilterOperatorLessThanOrEq,
value: uint64(1617979338000000000),
expectedSQL: "timestamp <= ?",
expectedArgs: []any{uint64(1617979338000000000)},
expectedError: nil,
},
{
@ -88,7 +94,8 @@ func TestConditionFor(t *testing.T) {
},
operator: qbtypes.FilterOperatorILike,
value: "%admin%",
expectedSQL: "WHERE LOWER(attributes_string['user.id']) LIKE LOWER(?)",
expectedSQL: "(LOWER(attributes_string['user.id']) LIKE LOWER(?) AND mapContains(attributes_string, 'user.id') = ?)",
expectedArgs: []any{"%admin%", true},
expectedError: nil,
},
{
@ -101,6 +108,7 @@ func TestConditionFor(t *testing.T) {
operator: qbtypes.FilterOperatorNotILike,
value: "%admin%",
expectedSQL: "WHERE LOWER(attributes_string['user.id']) NOT LIKE LOWER(?)",
expectedArgs: []any{"%admin%", true},
expectedError: nil,
},
{
@ -112,6 +120,7 @@ func TestConditionFor(t *testing.T) {
operator: qbtypes.FilterOperatorBetween,
value: []any{uint64(1617979338000000000), uint64(1617979348000000000)},
expectedSQL: "timestamp BETWEEN ? AND ?",
expectedArgs: []any{uint64(1617979338000000000), uint64(1617979348000000000)},
expectedError: nil,
},
{
@ -145,6 +154,7 @@ func TestConditionFor(t *testing.T) {
operator: qbtypes.FilterOperatorNotBetween,
value: []any{uint64(1617979338000000000), uint64(1617979348000000000)},
expectedSQL: "timestamp NOT BETWEEN ? AND ?",
expectedArgs: []any{uint64(1617979338000000000), uint64(1617979348000000000)},
expectedError: nil,
},
{
@ -180,7 +190,34 @@ func TestConditionFor(t *testing.T) {
},
operator: qbtypes.FilterOperatorContains,
value: "admin",
expectedSQL: "WHERE LOWER(attributes_string['user.id']) LIKE LOWER(?)",
expectedSQL: "(LOWER(attributes_string['user.id']) LIKE LOWER(?) AND mapContains(attributes_string, 'user.id') = ?)",
expectedArgs: []any{"%admin%", true},
expectedError: nil,
},
{
name: "In operator - map field",
key: telemetrytypes.TelemetryFieldKey{
Name: "user.id",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
operator: qbtypes.FilterOperatorIn,
value: []any{"admin", "user"},
expectedSQL: "((attributes_string['user.id'] = ? OR attributes_string['user.id'] = ?) AND mapContains(attributes_string, 'user.id') = ?)",
expectedArgs: []any{"admin", "user", true},
expectedError: nil,
},
{
name: "Not In operator - map field",
key: telemetrytypes.TelemetryFieldKey{
Name: "user.id",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
operator: qbtypes.FilterOperatorNotIn,
value: []any{"admin", "user"},
expectedSQL: "(attributes_string['user.id'] <> ? AND attributes_string['user.id'] <> ?)",
expectedArgs: []any{"admin", "user", true},
expectedError: nil,
},
{

View File

@ -0,0 +1,51 @@
package telemetrytraces
var (
IntrinsicFields = []string{
"timestamp",
"trace_id",
"span_id",
"trace_state",
"parent_span_id",
"flags",
"name",
"kind",
"kind_string",
"duration_nano",
"status_code",
"status_message",
"status_code_string",
"traceID",
"spanID",
"parentSpanID",
"spanKind",
"durationNano",
"statusCode",
"statusMessage",
"statusCodeString",
}
CalculatedFields = []string{
"response_status_code",
"external_http_url",
"http_url",
"external_http_method",
"http_method",
"http_host",
"db_name",
"db_operation",
"has_error",
"is_remote",
"responseStatusCode",
"externalHttpUrl",
"httpUrl",
"externalHttpMethod",
"httpMethod",
"httpHost",
"dbName",
"dbOperation",
"hasError",
"isRemote",
}
)

View File

@ -82,6 +82,35 @@ const (
FilterOperatorNotContains
)
// AddDefaultExistsFilter returns true if an additional exists filter should be added to the query.
// For the negative predicates, we don't want to add the exists filter. Why?
// Say, for example, a user adds the filter `service.name != "redis"`; we can't interpret it
// unambiguously, i.e. do they mean to fetch logs that satisfy
// service.name != "redis", or do they mean to fetch logs that have the `service.name` field and
// whose value is not "redis"?
// Since we don't know the intent, we don't add the exists filter. Users are expected
// to add the exists filter themselves if exclusion is desired.
//
// For the positive predicates, the key existence is implied.
func (f FilterOperator) AddDefaultExistsFilter() bool {
switch f {
case
FilterOperatorEqual,
FilterOperatorGreaterThan,
FilterOperatorGreaterThanOrEq,
FilterOperatorLessThan,
FilterOperatorLessThanOrEq,
FilterOperatorLike,
FilterOperatorILike,
FilterOperatorBetween,
FilterOperatorIn,
FilterOperatorRegexp,
FilterOperatorContains:
return true
}
return false
}
type OrderDirection struct {
valuer.String
}
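A quick illustration of the operator split introduced above, as a minimal sketch (the qbtypes alias mirrors the other files in this commit):

package main

import (
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func main() {
	// Positive predicates imply key existence, so the exists check is added.
	fmt.Println(qbtypes.FilterOperatorEqual.AddDefaultExistsFilter())    // true
	fmt.Println(qbtypes.FilterOperatorContains.AddDefaultExistsFilter()) // true
	// Negative predicates are ambiguous; users add `exists` explicitly if needed.
	fmt.Println(qbtypes.FilterOperatorNotEqual.AddDefaultExistsFilter()) // false
	fmt.Println(qbtypes.FilterOperatorNotIn.AddDefaultExistsFilter())    // false
}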

View File

@ -99,11 +99,11 @@ func GetFieldKeyFromKeyText(key string) TelemetryFieldKey {
}
func FieldKeyToMaterializedColumnName(key *TelemetryFieldKey) string {
return fmt.Sprintf("%s_%s_%s", key.FieldContext.String, fieldDataTypes[key.FieldDataType.StringValue()].StringValue(), strings.ReplaceAll(key.Name, ".", "$$"))
return fmt.Sprintf("`%s_%s_%s`", key.FieldContext.String, fieldDataTypes[key.FieldDataType.StringValue()].StringValue(), strings.ReplaceAll(key.Name, ".", "$$"))
}
func FieldKeyToMaterializedColumnNameForExists(key *TelemetryFieldKey) string {
return fmt.Sprintf("%s_%s_%s_exists", key.FieldContext.String, fieldDataTypes[key.FieldDataType.StringValue()].StringValue(), strings.ReplaceAll(key.Name, ".", "$$"))
return fmt.Sprintf("`%s_%s_%s_exists`", key.FieldContext.String, fieldDataTypes[key.FieldDataType.StringValue()].StringValue(), strings.ReplaceAll(key.Name, ".", "$$"))
}
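For illustration, a hypothetical example of the new backtick-quoted column names, assuming the field context renders as "attribute" and the data type maps to "string" for a string attribute key; the exact output strings are assumptions, not taken from this diff.

package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

func main() {
	key := &telemetrytypes.TelemetryFieldKey{
		Name:          "user.id",
		FieldContext:  telemetrytypes.FieldContextAttribute,
		FieldDataType: telemetrytypes.FieldDataTypeString,
	}
	// Assuming the context renders as "attribute" and the data type as "string",
	// dots are replaced with "$$" and the whole name is now backtick-quoted:
	fmt.Println(telemetrytypes.FieldKeyToMaterializedColumnName(key))          // `attribute_string_user$$id`
	fmt.Println(telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)) // `attribute_string_user$$id_exists`
}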
type TelemetryFieldValues struct {