// evaluator.tsx
// To evaluate a result of the search syntax, we flatten the AST,
// transform it to postfix notation which gets rid of parenthesis and tokens
// that do not hold any value as they cannot be evaluated and then evaluate
// the postfix notation.
import {
  BooleanOperator,
  Token,
  type TokenResult,
} from 'sentry/components/searchSyntax/parser';
// A parser token augmented with synthetic parenthesis markers. The parser
// does not emit parenthesis as standalone tokens, so L_PAREN/R_PAREN are
// inserted while flattening logic groups (see toFlattened below).
export type ProcessedTokenResult =
  | TokenResult<Token>
  | {type: 'L_PAREN'}
  | {type: 'R_PAREN'};
  14. export function toFlattened(tokens: TokenResult<Token>[]): ProcessedTokenResult[] {
  15. const flattened_result: ProcessedTokenResult[] = [];
  16. function flatten(token: TokenResult<Token>): void {
  17. switch (token.type) {
  18. case Token.SPACES:
  19. case Token.VALUE_BOOLEAN:
  20. case Token.VALUE_DURATION:
  21. case Token.VALUE_ISO_8601_DATE:
  22. case Token.VALUE_SIZE:
  23. case Token.VALUE_NUMBER_LIST:
  24. case Token.VALUE_NUMBER:
  25. case Token.VALUE_TEXT:
  26. case Token.VALUE_TEXT_LIST:
  27. case Token.VALUE_RELATIVE_DATE:
  28. case Token.VALUE_PERCENTAGE:
  29. case Token.KEY_SIMPLE:
  30. return;
  31. case Token.LOGIC_GROUP:
  32. flattened_result.push({type: 'L_PAREN'});
  33. for (const child of token.inner) {
  34. // Logic groups are wrapped in parenthesis,
  35. // but those parenthesis are not actual tokens returned by the parser
  36. flatten(child);
  37. }
  38. flattened_result.push({type: 'R_PAREN'});
  39. break;
  40. case Token.LOGIC_BOOLEAN:
  41. flattened_result.push(token);
  42. break;
  43. default:
  44. flattened_result.push(token);
  45. break;
  46. }
  47. }
  48. for (let i = 0; i < tokens.length; i++) {
  49. flatten(tokens[i]);
  50. }
  51. return flattened_result;
  52. }
  53. // At this point we have a flat list of groups that we can evaluate, however since the syntax allows
  54. // implicit ANDs, we should still insert those as it will make constructing a valid AST easier
  55. export function insertImplicitAND(
  56. tokens: ProcessedTokenResult[]
  57. ): ProcessedTokenResult[] {
  58. const with_implicit_and: ProcessedTokenResult[] = [];
  59. const AND = {
  60. type: Token.LOGIC_BOOLEAN,
  61. value: BooleanOperator.AND,
  62. text: 'AND',
  63. location: null as unknown as PEG.LocationRange,
  64. invalid: null,
  65. } as TokenResult<Token>;
  66. for (let i = 0; i < tokens.length; i++) {
  67. const next = tokens[i + 1];
  68. with_implicit_and.push(tokens[i]);
  69. // If current is not a logic boolean and next is not a logic boolean, insert an implicit AND.
  70. if (
  71. next &&
  72. next.type !== Token.LOGIC_BOOLEAN &&
  73. tokens[i].type !== Token.LOGIC_BOOLEAN &&
  74. tokens[i].type !== 'L_PAREN' &&
  75. next.type !== 'R_PAREN'
  76. ) {
  77. with_implicit_and.push(AND);
  78. }
  79. }
  80. return with_implicit_and;
  81. }