// lexer.test.ts — tokenizer tests for the Graphite expression lexer.
  1. import { Lexer } from '../lexer';
  2. describe('when lexing graphite expression', () => {
  3. it('should tokenize metric expression', () => {
  4. const lexer = new Lexer('metric.test.*.asd.count');
  5. const tokens = lexer.tokenize();
  6. expect(tokens[0].value).toBe('metric');
  7. expect(tokens[1].value).toBe('.');
  8. expect(tokens[2].type).toBe('identifier');
  9. expect(tokens[4].type).toBe('identifier');
  10. expect(tokens[4].pos).toBe(13);
  11. });
  12. it('should tokenize metric expression with dash', () => {
  13. const lexer = new Lexer('metric.test.se1-server-*.asd.count');
  14. const tokens = lexer.tokenize();
  15. expect(tokens[4].type).toBe('identifier');
  16. expect(tokens[4].value).toBe('se1-server-*');
  17. });
  18. it('should tokenize metric expression with dash2', () => {
  19. const lexer = new Lexer('net.192-168-1-1.192-168-1-9.ping_value.*');
  20. const tokens = lexer.tokenize();
  21. expect(tokens[0].value).toBe('net');
  22. expect(tokens[2].value).toBe('192-168-1-1');
  23. });
  24. it('should tokenize metric expression with equal sign', () => {
  25. const lexer = new Lexer('apps=test');
  26. const tokens = lexer.tokenize();
  27. expect(tokens[0].value).toBe('apps=test');
  28. });
  29. it('simple function2', () => {
  30. const lexer = new Lexer('offset(test.metric, -100)');
  31. const tokens = lexer.tokenize();
  32. expect(tokens[2].type).toBe('identifier');
  33. expect(tokens[4].type).toBe('identifier');
  34. expect(tokens[6].type).toBe('number');
  35. });
  36. it('should tokenize metric expression with curly braces', () => {
  37. const lexer = new Lexer('metric.se1-{first, second}.count');
  38. const tokens = lexer.tokenize();
  39. expect(tokens.length).toBe(10);
  40. expect(tokens[3].type).toBe('{');
  41. expect(tokens[4].value).toBe('first');
  42. expect(tokens[5].value).toBe(',');
  43. expect(tokens[6].value).toBe('second');
  44. });
  45. it('should tokenize metric expression with number segments', () => {
  46. const lexer = new Lexer('metric.10.12_10.test');
  47. const tokens = lexer.tokenize();
  48. expect(tokens[0].type).toBe('identifier');
  49. expect(tokens[2].type).toBe('identifier');
  50. expect(tokens[2].value).toBe('10');
  51. expect(tokens[4].value).toBe('12_10');
  52. expect(tokens[4].type).toBe('identifier');
  53. });
  54. it('should tokenize metric expression with segment that start with number', () => {
  55. const lexer = new Lexer('metric.001-server');
  56. const tokens = lexer.tokenize();
  57. expect(tokens[0].type).toBe('identifier');
  58. expect(tokens[2].type).toBe('identifier');
  59. expect(tokens.length).toBe(3);
  60. });
  61. it('should tokenize func call with numbered metric and number arg', () => {
  62. const lexer = new Lexer('scale(metric.10, 15)');
  63. const tokens = lexer.tokenize();
  64. expect(tokens[0].type).toBe('identifier');
  65. expect(tokens[2].type).toBe('identifier');
  66. expect(tokens[2].value).toBe('metric');
  67. expect(tokens[4].value).toBe('10');
  68. expect(tokens[4].type).toBe('number');
  69. expect(tokens[6].type).toBe('number');
  70. });
  71. it('should tokenize metric with template parameter', () => {
  72. const lexer = new Lexer('metric.[[server]].test');
  73. const tokens = lexer.tokenize();
  74. expect(tokens[2].type).toBe('identifier');
  75. expect(tokens[2].value).toBe('[[server]]');
  76. expect(tokens[4].type).toBe('identifier');
  77. });
  78. it('should tokenize metric with question mark', () => {
  79. const lexer = new Lexer('metric.server_??.test');
  80. const tokens = lexer.tokenize();
  81. expect(tokens[2].type).toBe('identifier');
  82. expect(tokens[2].value).toBe('server_??');
  83. expect(tokens[4].type).toBe('identifier');
  84. });
  85. it('should handle error with unterminated string', () => {
  86. const lexer = new Lexer("alias(metric, 'asd)");
  87. const tokens = lexer.tokenize();
  88. expect(tokens[0].value).toBe('alias');
  89. expect(tokens[1].value).toBe('(');
  90. expect(tokens[2].value).toBe('metric');
  91. expect(tokens[3].value).toBe(',');
  92. expect(tokens[4].type).toBe('string');
  93. expect(tokens[4].isUnclosed).toBe(true);
  94. expect(tokens[4].pos).toBe(20);
  95. });
  96. it('should handle float parameters', () => {
  97. const lexer = new Lexer('alias(metric, 0.002)');
  98. const tokens = lexer.tokenize();
  99. expect(tokens[4].type).toBe('number');
  100. expect(tokens[4].value).toBe('0.002');
  101. });
  102. it('should handle bool parameters', () => {
  103. const lexer = new Lexer('alias(metric, true, false)');
  104. const tokens = lexer.tokenize();
  105. expect(tokens[4].type).toBe('bool');
  106. expect(tokens[4].value).toBe('true');
  107. expect(tokens[6].type).toBe('bool');
  108. });
  109. });