datasource.test.ts

import { lastValueFrom, of } from 'rxjs';
import { toArray } from 'rxjs/operators';
import {
  ArrayVector,
  DataFrame,
  dataFrameToJSON,
  dateTime,
  Field,
  FieldType,
  LogLevel,
  LogRowModel,
  MutableDataFrame,
} from '@grafana/data';
import { setDataSourceSrv } from '@grafana/runtime';
import {
  dimensionVariable,
  expressionVariable,
  labelsVariable,
  limitVariable,
  logGroupNamesVariable,
  metricVariable,
  namespaceVariable,
  setupMockedDataSource,
  regionVariable,
} from './__mocks__/CloudWatchDataSource';
import { validLogsQuery, validMetricsQuery } from './__mocks__/queries';
import { LOGSTREAM_IDENTIFIER_INTERNAL, LOG_IDENTIFIER_INTERNAL } from './datasource';
import {
  CloudWatchLogsQueryStatus,
  CloudWatchMetricsQuery,
  CloudWatchQuery,
  MetricEditorMode,
  MetricQueryType,
} from './types';
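
// These tests exercise the CloudWatch datasource against the mocked backend returned by
// setupMockedDataSource(), so assertions on fetchMock inspect the request body that would be sent
// to the Grafana backend. toEmitValuesWith is a custom observable matcher from Grafana's Jest
// setup rather than a built-in Jest matcher.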
describe('datasource', () => {
  describe('query', () => {
    it('should return an error if a log query does not specify log groups', async () => {
      const { datasource } = setupMockedDataSource();
      const observable = datasource.query({ targets: [{ queryMode: 'Logs' as 'Logs' }] } as any);

      await expect(observable).toEmitValuesWith((received) => {
        const response = received[0];
        expect(response.error?.message).toBe('Log group is required');
      });
    });

    it('should return empty response if queries are hidden', async () => {
      const { datasource } = setupMockedDataSource();
      const observable = datasource.query({ targets: [{ queryMode: 'Logs' as 'Logs', hide: true }] } as any);

      await expect(observable).toEmitValuesWith((received) => {
        const response = received[0];
        expect(response.data).toEqual([]);
      });
    });
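
    // Hidden queries are normally dropped before a request is built. The exception in this table is
    // a hidden metrics query with an id set, which stays in the request, presumably so that other
    // queries can still reference it by id.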
    const testTable: Array<{ query: CloudWatchQuery; valid: boolean }> = [
      { query: { ...validLogsQuery, hide: true }, valid: false },
      { query: { ...validLogsQuery, hide: false }, valid: true },
      { query: { ...validMetricsQuery, hide: true }, valid: false },
      { query: { ...validMetricsQuery, hide: true, id: 'queryA' }, valid: true },
      { query: { ...validMetricsQuery, hide: false }, valid: true },
    ];

    test.each(testTable)('should filter out hidden queries unless id is provided', ({ query, valid }) => {
      const { datasource } = setupMockedDataSource();
      expect(datasource.filterQuery(query)).toEqual(valid);
    });

    it('should interpolate variables in the query', async () => {
      const { datasource, fetchMock } = setupMockedDataSource();
      await lastValueFrom(
        datasource
          .query({
            targets: [
              {
                queryMode: 'Logs',
                region: '$region',
                expression: 'fields $fields',
                logGroupNames: ['/some/$group'],
              },
            ],
          } as any)
          .pipe(toArray())
      );
      expect(fetchMock.mock.calls[0][0].data.queries[0]).toMatchObject({
        queryString: 'fields templatedField',
        logGroupNames: ['/some/templatedGroup'],
        region: 'templatedRegion',
      });
    });

    it('should interpolate multi-value template variable for log group names in the query', async () => {
      const { datasource, fetchMock } = setupMockedDataSource({
        variables: [expressionVariable, logGroupNamesVariable, regionVariable],
        mockGetVariableName: false,
      });
      await lastValueFrom(
        datasource
          .query({
            targets: [
              {
                queryMode: 'Logs',
                region: '$region',
                expression: 'fields $fields',
                logGroupNames: ['$groups'],
              },
            ],
          } as any)
          .pipe(toArray())
      );
      expect(fetchMock.mock.calls[0][0].data.queries[0]).toMatchObject({
        queryString: 'fields templatedField',
        logGroupNames: ['templatedGroup-1', 'templatedGroup-2'],
        region: 'templatedRegion',
      });
    });
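
    // setupForLogs() (defined at the bottom of this file) stubs the start-query/get-results round
    // trip and registers a fake X-Ray datasource, so the data links attached to the @xrayTraceId
    // and @message fields can be asserted here.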
    it('should add links to log queries', async () => {
      const { datasource } = setupForLogs();
      const observable = datasource.query({
        targets: [
          {
            queryMode: 'Logs',
            logGroupNames: ['test'],
            refId: 'a',
          },
        ],
      } as any);

      const emits = await lastValueFrom(observable.pipe(toArray()));
      expect(emits).toHaveLength(1);
      expect(emits[0].data[0].fields.find((f: Field) => f.name === '@xrayTraceId').config.links).toMatchObject([
        {
          title: 'Xray',
          url: '',
          internal: {
            query: { query: '${__value.raw}', region: 'us-west-1', queryType: 'getTrace' },
            datasourceUid: 'xray',
            datasourceName: 'Xray',
          },
        },
      ]);
      expect(emits[0].data[0].fields.find((f: Field) => f.name === '@message').config.links).toMatchObject([
        {
          title: 'View in CloudWatch console',
          url: "https://us-west-1.console.aws.amazon.com/cloudwatch/home?region=us-west-1#logs-insights:queryDetail=~(end~'2020-12-31T19*3a00*3a00.000Z~start~'2020-12-31T19*3a00*3a00.000Z~timeType~'ABSOLUTE~tz~'UTC~editorString~'~isLiveTail~false~source~(~'test))",
        },
      ]);
    });
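
    // Multi-value template variables are not supported for namespace or metric name, so the
    // datasource raises a debounced warning instead of failing the query; region does not trigger
    // that warning.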
    describe('debouncedCustomAlert', () => {
      const debouncedAlert = jest.fn();
      beforeEach(() => {
        const { datasource } = setupMockedDataSource({
          variables: [
            { ...namespaceVariable, multi: true },
            { ...metricVariable, multi: true },
          ],
        });
        datasource.debouncedCustomAlert = debouncedAlert;
        datasource.performTimeSeriesQuery = jest.fn().mockResolvedValue([]);
        datasource.query({
          targets: [
            {
              queryMode: 'Metrics',
              id: '',
              region: 'us-east-2',
              namespace: namespaceVariable.id,
              metricName: metricVariable.id,
              period: '',
              alias: '',
              dimensions: {},
              matchExact: true,
              statistic: '',
              refId: '',
              expression: 'x * 2',
              metricQueryType: MetricQueryType.Search,
              metricEditorMode: MetricEditorMode.Code,
            },
          ],
        } as any);
      });

      it('should show debounced alert for namespace and metric name', async () => {
        expect(debouncedAlert).toHaveBeenCalledWith(
          'CloudWatch templating error',
          'Multi template variables are not supported for namespace'
        );
        expect(debouncedAlert).toHaveBeenCalledWith(
          'CloudWatch templating error',
          'Multi template variables are not supported for metric name'
        );
      });

      it('should not show debounced alert for region', async () => {
        expect(debouncedAlert).not.toHaveBeenCalledWith(
          'CloudWatch templating error',
          'Multi template variables are not supported for region'
        );
      });
    });
  });
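
  // filterMetricQuery decides whether a metrics query is complete enough to be sent to the
  // backend; the rules depend on the metric query type and the editor mode (builder vs. code).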
  describe('filterMetricsQuery', () => {
    const datasource = setupMockedDataSource().datasource;
    let baseQuery: CloudWatchMetricsQuery;
    beforeEach(() => {
      baseQuery = {
        id: '',
        region: 'us-east-2',
        namespace: '',
        period: '',
        alias: '',
        metricName: '',
        dimensions: {},
        matchExact: true,
        statistic: '',
        expression: '',
        refId: '',
      };
    });

    it('should error if invalid mode', async () => {
      expect(() => datasource.filterMetricQuery(baseQuery)).toThrowError('invalid metric editor mode');
    });

    describe('metric search queries', () => {
      beforeEach(() => {
        baseQuery = {
          ...baseQuery,
          namespace: 'AWS/EC2',
          metricName: 'CPUUtilization',
          statistic: 'Average',
          metricQueryType: MetricQueryType.Search,
          metricEditorMode: MetricEditorMode.Builder,
        };
      });

      it("should not allow builder queries that don't have namespace, metric or statistic", async () => {
        expect(datasource.filterMetricQuery({ ...baseQuery, statistic: undefined })).toBeFalsy();
        expect(datasource.filterMetricQuery({ ...baseQuery, metricName: undefined })).toBeFalsy();
        expect(datasource.filterMetricQuery({ ...baseQuery, namespace: '' })).toBeFalsy();
      });

      it('should allow builder queries that have namespace, metric and statistic', async () => {
        expect(datasource.filterMetricQuery(baseQuery)).toBeTruthy();
      });

      it("should not allow code queries that don't have an expression", async () => {
        expect(
          datasource.filterMetricQuery({
            ...baseQuery,
            expression: undefined,
            metricEditorMode: MetricEditorMode.Code,
          })
        ).toBeFalsy();
      });

      it('should allow code queries that have an expression', async () => {
        expect(
          datasource.filterMetricQuery({ ...baseQuery, expression: 'x * 2', metricEditorMode: MetricEditorMode.Code })
        ).toBeTruthy();
      });
    });

    describe('metric search expression queries', () => {
      beforeEach(() => {
        baseQuery = {
          ...baseQuery,
          metricQueryType: MetricQueryType.Search,
          metricEditorMode: MetricEditorMode.Code,
        };
      });

      it("should not allow queries that don't have an expression", async () => {
        const valid = datasource.filterMetricQuery(baseQuery);
        expect(valid).toBeFalsy();
      });

      it('should allow queries that have an expression', async () => {
        baseQuery.expression = 'SUM([a,x])';
        const valid = datasource.filterMetricQuery(baseQuery);
        expect(valid).toBeTruthy();
      });
    });

    describe('metric query queries', () => {
      beforeEach(() => {
        baseQuery = {
          ...baseQuery,
          metricQueryType: MetricQueryType.Query,
          metricEditorMode: MetricEditorMode.Code,
        };
      });

      it("should not allow queries that don't have a SQL expression", async () => {
        const valid = datasource.filterMetricQuery(baseQuery);
        expect(valid).toBeFalsy();
      });

      it('should allow queries that have a SQL expression', async () => {
        baseQuery.sqlExpression = 'select SUM(CPUUtilization) from "AWS/EC2"';
        const valid = datasource.filterMetricQuery(baseQuery);
        expect(valid).toBeTruthy();
      });
    });
  });
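
  // Metric resource requests come back as generic { text, value } pairs and are mapped onto the
  // { namespace, metricName } shape the query editor works with.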
  describe('resource requests', () => {
    it('should map resource response to metric response', async () => {
      const datasource = setupMockedDataSource().datasource;
      datasource.doMetricResourceRequest = jest.fn().mockResolvedValue([
        {
          text: 'AWS/EC2',
          value: 'CPUUtilization',
        },
        {
          text: 'AWS/Redshift',
          value: 'CPUPercentage',
        },
      ]);
      const allMetrics = await datasource.getAllMetrics('us-east-2');
      expect(allMetrics[0].metricName).toEqual('CPUUtilization');
      expect(allMetrics[0].namespace).toEqual('AWS/EC2');
      expect(allMetrics[1].metricName).toEqual('CPUPercentage');
      expect(allMetrics[1].namespace).toEqual('AWS/Redshift');
    });
  });
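
  // performTimeSeriesQuery turns each backend result into a data frame. The custom period metadata
  // (seconds) is expected to surface as fields[].config.interval in milliseconds.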
  describe('performTimeSeriesQuery', () => {
    it('should return the same number of data frames as results', async () => {
      const { datasource } = setupMockedDataSource({
        data: {
          results: {
            a: { refId: 'a', series: [{ name: 'cpu', points: [1, 1] }], meta: {} },
            b: { refId: 'b', series: [{ name: 'memory', points: [2, 2] }], meta: {} },
          },
        },
      });

      const observable = datasource.performTimeSeriesQuery(
        {
          queries: [
            { datasourceId: 1, refId: 'a' },
            { datasourceId: 1, refId: 'b' },
          ],
        } as any,
        { from: dateTime(), to: dateTime() } as any
      );

      await expect(observable).toEmitValuesWith((received) => {
        const response = received[0];
        expect(response.data.length).toEqual(2);
      });
    });

    it('sets fields.config.interval based on period', async () => {
      const { datasource } = setupMockedDataSource({
        data: {
          results: {
            a: {
              refId: 'a',
              series: [{ name: 'cpu', points: [1, 2], meta: { custom: { period: 60 } } }],
            },
            b: {
              refId: 'b',
              series: [{ name: 'cpu', points: [1, 2], meta: { custom: { period: 120 } } }],
            },
          },
        },
      });

      const observable = datasource.performTimeSeriesQuery(
        {
          queries: [{ datasourceId: 1, refId: 'a' }],
        } as any,
        { from: dateTime(), to: dateTime() } as any
      );

      await expect(observable).toEmitValuesWith((received) => {
        const response = received[0];
        expect(response.data[0].fields[0].config.interval).toEqual(60000);
        expect(response.data[1].fields[0].config.interval).toEqual(120000);
      });
    });
  });
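
  // describeLogGroups resolves the special 'default' region to the region configured on the
  // datasource instance (us-west-1 in this mock) before sending the request.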
  describe('describeLogGroup', () => {
    it('replaces region correctly in the query', async () => {
      const { datasource, fetchMock } = setupMockedDataSource();
      await datasource.describeLogGroups({ region: 'default' });
      expect(fetchMock.mock.calls[0][0].data.queries[0].region).toBe('us-west-1');

      await datasource.describeLogGroups({ region: 'eu-east' });
      expect(fetchMock.mock.calls[1][0].data.queries[0].region).toBe('eu-east');
    });
  });
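
  // getLogRowContext reads the log group and log stream from the row's internal identifier fields
  // and uses the row timestamp as the end time for backward context and as the start time when the
  // direction is FORWARD.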
  describe('getLogRowContext', () => {
    it('replaces parameters correctly in the query', async () => {
      const { datasource, fetchMock } = setupMockedDataSource();
      const row: LogRowModel = {
        entryFieldIndex: 0,
        rowIndex: 0,
        dataFrame: new MutableDataFrame({
          refId: 'B',
          fields: [
            { name: 'ts', type: FieldType.time, values: [1] },
            { name: LOG_IDENTIFIER_INTERNAL, type: FieldType.string, values: ['foo'], labels: {} },
            { name: LOGSTREAM_IDENTIFIER_INTERNAL, type: FieldType.string, values: ['bar'], labels: {} },
          ],
        }),
        entry: '4',
        labels: {},
        hasAnsi: false,
        hasUnescapedContent: false,
        raw: '4',
        logLevel: LogLevel.info,
        timeEpochMs: 4,
        timeEpochNs: '4000000',
        timeFromNow: '',
        timeLocal: '',
        timeUtc: '',
        uid: '1',
      };
      await datasource.getLogRowContext(row);
      expect(fetchMock.mock.calls[0][0].data.queries[0].endTime).toBe(4);
      expect(fetchMock.mock.calls[0][0].data.queries[0].region).toBe(undefined);

      await datasource.getLogRowContext(row, { direction: 'FORWARD' }, { ...validLogsQuery, region: 'eu-east' });
      expect(fetchMock.mock.calls[1][0].data.queries[0].startTime).toBe(4);
      expect(fetchMock.mock.calls[1][0].data.queries[0].region).toBe('eu-east');
    });
  });
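
  // Template variables inside sqlExpression are expanded before the request is sent; ${labels:raw}
  // exercises a multi-value variable rendered with the raw format option.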
  describe('template variable interpolation', () => {
    it('interpolates variables correctly', async () => {
      const { datasource, fetchMock } = setupMockedDataSource({
        variables: [namespaceVariable, metricVariable, labelsVariable, limitVariable],
      });
      datasource.handleMetricQueries(
        [
          {
            id: '',
            refId: 'a',
            region: 'us-east-2',
            namespace: '',
            period: '',
            alias: '',
            metricName: '',
            dimensions: {},
            matchExact: true,
            statistic: '',
            expression: '',
            metricQueryType: MetricQueryType.Query,
            metricEditorMode: MetricEditorMode.Code,
            sqlExpression: 'SELECT SUM($metric) FROM "$namespace" GROUP BY ${labels:raw} LIMIT $limit',
          },
        ],
        { range: { from: dateTime(), to: dateTime() } } as any
      );
      expect(fetchMock).toHaveBeenCalledWith(
        expect.objectContaining({
          data: expect.objectContaining({
            queries: expect.arrayContaining([
              expect.objectContaining({
                sqlExpression: `SELECT SUM(CPUUtilization) FROM "AWS/EC2" GROUP BY InstanceId,InstanceType LIMIT 100`,
              }),
            ]),
          }),
        })
      );
    });
  });
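
  // Note: the expected offsets below are daylight-saving values for Stockholm (+0200) and New York
  // (-0400), so this table depends on the date used for the query range.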
  describe('timezoneUTCOffset', () => {
    const testQuery = {
      id: '',
      refId: 'a',
      region: 'us-east-2',
      namespace: '',
      period: '',
      label: '${MAX_TIME_RELATIVE}',
      metricName: '',
      dimensions: {},
      matchExact: true,
      statistic: '',
      expression: '',
      metricQueryType: MetricQueryType.Query,
      metricEditorMode: MetricEditorMode.Code,
      sqlExpression: 'SELECT SUM($metric) FROM "$namespace" GROUP BY ${labels:raw} LIMIT $limit',
    };
    const testTable = [
      ['Europe/Stockholm', '+0200'],
      ['America/New_York', '-0400'],
      ['Asia/Tokyo', '+0900'],
      ['UTC', '+0000'],
    ];

    test.each(testTable)('should use the right time zone offset for %s', (ianaTimezone, expectedOffset) => {
      const { datasource, fetchMock } = setupMockedDataSource();
      datasource.handleMetricQueries([testQuery], {
        range: { from: dateTime(), to: dateTime() },
        timezone: ianaTimezone,
      } as any);

      expect(fetchMock).toHaveBeenCalledWith(
        expect.objectContaining({
          data: expect.objectContaining({
            queries: expect.arrayContaining([
              expect.objectContaining({
                timezoneUTCOffset: expectedOffset,
              }),
            ]),
          }),
        })
      );
    });
  });

  describe('interpolateMetricsQueryVariables', () => {
    it('interpolates dimensions correctly', () => {
      const testQuery = {
        id: 'a',
        refId: 'a',
        region: 'us-east-2',
        namespace: '',
        dimensions: { InstanceId: '$dimension' },
      };
      const ds = setupMockedDataSource({ variables: [dimensionVariable], mockGetVariableName: false });
      const result = ds.datasource.interpolateMetricsQueryVariables(testQuery, {
        dimension: { text: 'foo', value: 'foo' },
      });
      expect(result).toStrictEqual({
        alias: '',
        metricName: '',
        namespace: '',
        period: '',
        sqlExpression: '',
        dimensions: { InstanceId: ['foo'] },
      });
    });
  });
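
  // convertMultiFilterFormat expands template variables in both the keys and the values of a
  // filter map; multi-value variables like $labels are flattened into the value list.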
  describe('convertMultiFiltersFormat', () => {
    const ds = setupMockedDataSource({ variables: [labelsVariable, dimensionVariable], mockGetVariableName: false });
    it('converts keys and values correctly', () => {
      const filters = { $dimension: ['b'], a: ['$labels', 'bar'] };
      const result = ds.datasource.convertMultiFilterFormat(filters);
      expect(result).toStrictEqual({
        env: ['b'],
        a: ['InstanceId', 'InstanceType', 'bar'],
      });
    });
  });

  describe('getLogGroupFields', () => {
    it('passes region correctly', async () => {
      const { datasource, fetchMock } = setupMockedDataSource();
      fetchMock.mockReturnValueOnce(
        of({
          data: {
            results: {
              A: {
                frames: [
                  dataFrameToJSON(
                    new MutableDataFrame({
                      fields: [
                        { name: 'key', values: [] },
                        { name: 'val', values: [] },
                      ],
                    })
                  ),
                ],
              },
            },
          },
        })
      );
      await datasource.getLogGroupFields({ region: 'us-west-1', logGroupName: 'test' });
      expect(fetchMock.mock.calls[0][0].data.queries[0].region).toBe('us-west-1');
    });
  });
});
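
// setupForLogs builds a datasource whose fetch mock answers the two-step CloudWatch Logs flow: the
// first call returns a frame with the queryId of the started query, the second returns a completed
// logs frame (@message, @timestamp, @xrayTraceId). It also registers a minimal fake X-Ray
// datasource so trace links can be generated.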
function setupForLogs() {
  function envelope(frame: DataFrame) {
    return { data: { results: { a: { refId: 'a', frames: [dataFrameToJSON(frame)] } } } };
  }

  const { datasource, fetchMock } = setupMockedDataSource();

  const startQueryFrame = new MutableDataFrame({ fields: [{ name: 'queryId', values: ['queryid'] }] });
  fetchMock.mockReturnValueOnce(of(envelope(startQueryFrame)));

  const logsFrame = new MutableDataFrame({
    fields: [
      {
        name: '@message',
        values: new ArrayVector(['something']),
      },
      {
        name: '@timestamp',
        values: new ArrayVector([1]),
      },
      {
        name: '@xrayTraceId',
        values: new ArrayVector(['1-613f0d6b-3e7cb34375b60662359611bd']),
      },
    ],
    meta: { custom: { Status: CloudWatchLogsQueryStatus.Complete } },
  });
  fetchMock.mockReturnValueOnce(of(envelope(logsFrame)));

  setDataSourceSrv({
    async get() {
      return {
        name: 'Xray',
      };
    },
  } as any);

  return { datasource, fetchMock };
}