logs_model.test.ts

import { Observable } from 'rxjs';

import {
  ArrayVector,
  DataFrame,
  DataQuery,
  DataQueryRequest,
  DataQueryResponse,
  dateTimeParse,
  FieldType,
  LoadingState,
  LogLevel,
  LogRowModel,
  LogsDedupStrategy,
  LogsMetaKind,
  MutableDataFrame,
  toDataFrame,
} from '@grafana/data';

import { MockObservableDataSourceApi } from '../../test/mocks/datasource_srv';

import {
  COMMON_LABELS,
  dataFrameToLogsModel,
  dedupLogRows,
  filterLogLevels,
  getSeriesProperties,
  LIMIT_LABEL,
  logSeriesToLogsModel,
  queryLogsVolume,
} from './logs_model';
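
// The first suite exercises dedupLogRows(); judging by the expectations below,
// each LogsDedupStrategy (none, exact, numbers, signature) merges adjacent
// duplicate rows and records the merge count in `duplicates`.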

describe('dedupLogRows()', () => {
  test('should return rows as is when dedup is set to none', () => {
    const rows: LogRowModel[] = [
      {
        entry: 'WARN test 1.23 on [xxx]',
      },
      {
        entry: 'WARN test 1.23 on [xxx]',
      },
    ] as any;
    expect(dedupLogRows(rows, LogsDedupStrategy.none)).toMatchObject(rows);
  });

  test('should dedup on exact matches', () => {
    const rows: LogRowModel[] = [
      {
        entry: 'WARN test 1.23 on [xxx]',
      },
      {
        entry: 'WARN test 1.23 on [xxx]',
      },
      {
        entry: 'INFO test 2.44 on [xxx]',
      },
      {
        entry: 'WARN test 1.23 on [xxx]',
      },
    ] as any;
    expect(dedupLogRows(rows, LogsDedupStrategy.exact)).toEqual([
      {
        duplicates: 1,
        entry: 'WARN test 1.23 on [xxx]',
      },
      {
        duplicates: 0,
        entry: 'INFO test 2.44 on [xxx]',
      },
      {
        duplicates: 0,
        entry: 'WARN test 1.23 on [xxx]',
      },
    ]);
  });

  test('should dedup on number matches', () => {
    const rows: LogRowModel[] = [
      {
        entry: 'WARN test 1.2323423 on [xxx]',
      },
      {
        entry: 'WARN test 1.23 on [xxx]',
      },
      {
        entry: 'INFO test 2.44 on [xxx]',
      },
      {
        entry: 'WARN test 1.23 on [xxx]',
      },
    ] as any;
    expect(dedupLogRows(rows, LogsDedupStrategy.numbers)).toEqual([
      {
        duplicates: 1,
        entry: 'WARN test 1.2323423 on [xxx]',
      },
      {
        duplicates: 0,
        entry: 'INFO test 2.44 on [xxx]',
      },
      {
        duplicates: 0,
        entry: 'WARN test 1.23 on [xxx]',
      },
    ]);
  });

  test('should dedup on signature matches', () => {
    const rows: LogRowModel[] = [
      {
        entry: 'WARN test 1.2323423 on [xxx]',
      },
      {
        entry: 'WARN test 1.23 on [xxx]',
      },
      {
        entry: 'INFO test 2.44 on [xxx]',
      },
      {
        entry: 'WARN test 1.23 on [xxx]',
      },
    ] as any;
    expect(dedupLogRows(rows, LogsDedupStrategy.signature)).toEqual([
      {
        duplicates: 3,
        entry: 'WARN test 1.2323423 on [xxx]',
      },
    ]);
  });

  test('should return to non-deduped state on same log result', () => {
    const rows: LogRowModel[] = [
      {
        entry: 'INFO 123',
      },
      {
        entry: 'WARN 123',
      },
      {
        entry: 'WARN 123',
      },
    ] as any;
    expect(dedupLogRows(rows, LogsDedupStrategy.exact)).toEqual([
      {
        duplicates: 0,
        entry: 'INFO 123',
      },
      {
        duplicates: 1,
        entry: 'WARN 123',
      },
    ]);
    expect(dedupLogRows(rows, LogsDedupStrategy.none)).toEqual(rows);
  });
});
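
// filterLogLevels() hides rows whose logLevel is in the given set; the second
// test checks that its output composes cleanly with dedupLogRows().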

describe('filterLogLevels()', () => {
  test('should correctly filter out log levels', () => {
    const rows: LogRowModel[] = [
      {
        entry: 'DEBUG 1',
        logLevel: LogLevel.debug,
      },
      {
        entry: 'ERROR 1',
        logLevel: LogLevel.error,
      },
      {
        entry: 'TRACE 1',
        logLevel: LogLevel.trace,
      },
    ] as any;
    const filteredLogs = filterLogLevels(rows, new Set([LogLevel.debug]));
    expect(filteredLogs.length).toBe(2);
    expect(filteredLogs).toEqual([
      { entry: 'ERROR 1', logLevel: 'error' },
      { entry: 'TRACE 1', logLevel: 'trace' },
    ]);
  });

  test('should correctly filter out log levels and then deduplicate', () => {
    const rows: LogRowModel[] = [
      {
        entry: 'DEBUG 1',
        logLevel: LogLevel.debug,
      },
      {
        entry: 'DEBUG 2',
        logLevel: LogLevel.debug,
      },
      {
        entry: 'DEBUG 2',
        logLevel: LogLevel.debug,
      },
      {
        entry: 'ERROR 1',
        logLevel: LogLevel.error,
      },
      {
        entry: 'TRACE 1',
        logLevel: LogLevel.trace,
      },
    ] as any;
    const filteredLogs = filterLogLevels(rows, new Set([LogLevel.error]));
    const deduplicatedLogs = dedupLogRows(filteredLogs, LogsDedupStrategy.exact);
    expect(deduplicatedLogs.length).toBe(3);
    expect(deduplicatedLogs).toEqual([
      { duplicates: 0, entry: 'DEBUG 1', logLevel: 'debug' },
      { duplicates: 1, entry: 'DEBUG 2', logLevel: 'debug' },
      { duplicates: 0, entry: 'TRACE 1', logLevel: 'trace' },
    ]);
  });
});
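
// Degenerate inputs (no fields, no time field, no string field) are all
// expected to map to this empty logs model.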

const emptyLogsModel: any = {
  hasUniqueLabels: false,
  rows: [],
  meta: [],
  series: [],
};
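
// dataFrameToLogsModel() is the conversion under test: from raw DataFrames to
// log rows, per-level count series, and meta entries (common labels, limit,
// parsing errors).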

describe('dataFrameToLogsModel', () => {
  it('given empty series should return empty logs model', () => {
    expect(dataFrameToLogsModel([] as DataFrame[], 0)).toMatchObject(emptyLogsModel);
  });

  it('given series without a correct series name should return empty logs model', () => {
    const series: DataFrame[] = [
      toDataFrame({
        fields: [],
      }),
    ];
    expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
  });

  it('given series without a time field should return empty logs model', () => {
    const series: DataFrame[] = [
      new MutableDataFrame({
        fields: [
          {
            name: 'message',
            type: FieldType.string,
            values: [],
          },
        ],
      }),
    ];
    expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
  });

  it('given series without a string field should return empty logs model', () => {
    const series: DataFrame[] = [
      new MutableDataFrame({
        fields: [
          {
            name: 'time',
            type: FieldType.time,
            values: [],
          },
        ],
      }),
    ];
    expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
  });

  it('given one series should return expected logs model', () => {
    const series: DataFrame[] = [
      new MutableDataFrame({
        fields: [
          {
            name: 'time',
            type: FieldType.time,
            values: ['2019-04-26T09:28:11.352440161Z', '2019-04-26T14:42:50.991981292Z'],
          },
          {
            name: 'message',
            type: FieldType.string,
            values: [
              't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
              't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
            ],
            labels: {
              filename: '/var/log/grafana/grafana.log',
              job: 'grafana',
            },
          },
          {
            name: 'id',
            type: FieldType.string,
            values: ['foo', 'bar'],
          },
        ],
        meta: {
          limit: 1000,
        },
      }),
    ];
    const logsModel = dataFrameToLogsModel(series, 1);
    expect(logsModel.hasUniqueLabels).toBeFalsy();
    expect(logsModel.rows).toHaveLength(2);
    expect(logsModel.rows).toMatchObject([
      {
        entry: 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
        labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
        logLevel: 'info',
        uniqueLabels: {},
        uid: 'foo',
      },
      {
        entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
        labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
        logLevel: 'error',
        uniqueLabels: {},
        uid: 'bar',
      },
    ]);

    expect(logsModel.series).toHaveLength(2);
    expect(logsModel.series).toMatchObject([
      {
        name: 'info',
        fields: [
          { type: 'time', values: new ArrayVector([1556270891000, 1556289770000]) },
          { type: 'number', values: new ArrayVector([1, 0]) },
        ],
      },
      {
        name: 'error',
        fields: [
          { type: 'time', values: new ArrayVector([1556289770000]) },
          { type: 'number', values: new ArrayVector([1]) },
        ],
      },
    ]);
    expect(logsModel.meta).toHaveLength(2);
    expect(logsModel.meta![0]).toMatchObject({
      label: COMMON_LABELS,
      value: {
        filename: '/var/log/grafana/grafana.log',
        job: 'grafana',
      },
      kind: LogsMetaKind.LabelsMap,
    });
    expect(logsModel.meta![1]).toMatchObject({
      label: LIMIT_LABEL,
      value: `1000 (2 returned)`,
      kind: LogsMetaKind.String,
    });
  });

  it('given one series with labels-field should return expected logs model', () => {
    const series: DataFrame[] = [
      new MutableDataFrame({
        fields: [
          {
            name: 'labels',
            type: FieldType.other,
            values: [
              {
                filename: '/var/log/grafana/grafana.log',
                job: 'grafana',
              },
              {
                filename: '/var/log/grafana/grafana.log',
                job: 'grafana',
              },
            ],
          },
          {
            name: 'time',
            type: FieldType.time,
            values: ['2019-04-26T09:28:11.352440161Z', '2019-04-26T14:42:50.991981292Z'],
          },
          {
            name: 'message',
            type: FieldType.string,
            values: [
              't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
              't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
            ],
          },
          {
            name: 'id',
            type: FieldType.string,
            values: ['foo', 'bar'],
          },
        ],
        meta: {
          limit: 1000,
          custom: {
            frameType: 'LabeledTimeValues',
          },
        },
      }),
    ];
    const logsModel = dataFrameToLogsModel(series, 1);
    expect(logsModel.hasUniqueLabels).toBeFalsy();
    expect(logsModel.rows).toHaveLength(2);
    expect(logsModel.rows).toMatchObject([
      {
        entry: 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
        labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
        logLevel: 'info',
        uniqueLabels: {},
        uid: 'foo',
      },
      {
        entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
        labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
        logLevel: 'error',
        uniqueLabels: {},
        uid: 'bar',
      },
    ]);

    expect(logsModel.series).toHaveLength(2);
    expect(logsModel.series).toMatchObject([
      {
        name: 'info',
        fields: [
          { type: 'time', values: new ArrayVector([1556270891000, 1556289770000]) },
          { type: 'number', values: new ArrayVector([1, 0]) },
        ],
      },
      {
        name: 'error',
        fields: [
          { type: 'time', values: new ArrayVector([1556289770000]) },
          { type: 'number', values: new ArrayVector([1]) },
        ],
      },
    ]);
    expect(logsModel.meta).toHaveLength(2);
    expect(logsModel.meta![0]).toMatchObject({
      label: COMMON_LABELS,
      value: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
      kind: LogsMetaKind.LabelsMap,
    });
    expect(logsModel.meta![1]).toMatchObject({
      label: LIMIT_LABEL,
      value: `1000 (2 returned)`,
      kind: LogsMetaKind.String,
    });
  });

  it('given one series with labels-field it should work regardless of the labels field position', () => {
    const labels = {
      name: 'labels',
      type: FieldType.other,
      values: [
        {
          node: 'first',
          mode: 'slow',
        },
      ],
    };

    const time = {
      name: 'time',
      type: FieldType.time,
      values: ['2019-04-26T09:28:11.352440161Z'],
    };

    const line = {
      name: 'line',
      type: FieldType.string,
      values: ['line1'],
    };

    const meta = {
      custom: {
        frameType: 'LabeledTimeValues',
      },
    };

    const frame1 = new MutableDataFrame({
      meta,
      fields: [labels, time, line],
    });

    const frame2 = new MutableDataFrame({
      meta,
      fields: [time, labels, line],
    });

    const frame3 = new MutableDataFrame({
      meta,
      fields: [time, line, labels],
    });

    const logsModel1 = dataFrameToLogsModel([frame1], 1);
    expect(logsModel1.rows).toHaveLength(1);
    expect(logsModel1.rows[0].labels).toStrictEqual({ mode: 'slow', node: 'first' });

    const logsModel2 = dataFrameToLogsModel([frame2], 1);
    expect(logsModel2.rows).toHaveLength(1);
    expect(logsModel2.rows[0].labels).toStrictEqual({ mode: 'slow', node: 'first' });

    const logsModel3 = dataFrameToLogsModel([frame3], 1);
    expect(logsModel3.rows).toHaveLength(1);
    expect(logsModel3.rows[0].labels).toStrictEqual({ mode: 'slow', node: 'first' });
  });

  it('given one series with error should return expected logs model', () => {
    const series: DataFrame[] = [
      new MutableDataFrame({
        fields: [
          {
            name: 'time',
            type: FieldType.time,
            values: ['2019-04-26T09:28:11.352440161Z', '2019-04-26T14:42:50.991981292Z'],
          },
          {
            name: 'message',
            type: FieldType.string,
            values: [
              't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
              't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
            ],
            labels: {
              filename: '/var/log/grafana/grafana.log',
              job: 'grafana',
              __error__: 'Failed while parsing',
            },
          },
          {
            name: 'id',
            type: FieldType.string,
            values: ['foo', 'bar'],
          },
        ],
        meta: {
          limit: 1000,
          custom: {
            error: 'Error when parsing some of the logs',
          },
        },
      }),
    ];
    const logsModel = dataFrameToLogsModel(series, 1);
    expect(logsModel.hasUniqueLabels).toBeFalsy();
    expect(logsModel.rows).toHaveLength(2);
    expect(logsModel.rows).toMatchObject([
      {
        entry: 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
        labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana', __error__: 'Failed while parsing' },
        logLevel: 'info',
        uniqueLabels: {},
        uid: 'foo',
      },
      {
        entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
        labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana', __error__: 'Failed while parsing' },
        logLevel: 'error',
        uniqueLabels: {},
        uid: 'bar',
      },
    ]);

    expect(logsModel.series).toHaveLength(2);
    expect(logsModel.meta).toHaveLength(3);
    expect(logsModel.meta![0]).toMatchObject({
      label: COMMON_LABELS,
      value: series[0].fields[1].labels,
      kind: LogsMetaKind.LabelsMap,
    });
    expect(logsModel.meta![1]).toMatchObject({
      label: LIMIT_LABEL,
      value: `1000 (2 returned)`,
      kind: LogsMetaKind.String,
    });
    expect(logsModel.meta![2]).toMatchObject({
      label: '',
      value: 'Error when parsing some of the logs',
      kind: LogsMetaKind.Error,
    });
  });

  it('given one series without labels should return expected logs model', () => {
    const series: DataFrame[] = [
      new MutableDataFrame({
        fields: [
          {
            name: 'time',
            type: FieldType.time,
            values: ['1970-01-01T00:00:01Z'],
          },
          {
            name: 'message',
            type: FieldType.string,
            values: ['WARN boooo'],
          },
          {
            name: 'level',
            type: FieldType.string,
            values: ['dbug'],
          },
        ],
      }),
    ];
    const logsModel = dataFrameToLogsModel(series, 1);
    expect(logsModel.rows).toHaveLength(1);
    expect(logsModel.rows).toMatchObject([
      {
        entry: 'WARN boooo',
        labels: {},
        logLevel: LogLevel.debug,
        uniqueLabels: {},
      },
    ]);
  });

  it('given multiple series with unique times should return expected logs model', () => {
    const series: DataFrame[] = [
      toDataFrame({
        fields: [
          {
            name: 'ts',
            type: FieldType.time,
            values: ['1970-01-01T00:00:01Z'],
          },
          {
            name: 'line',
            type: FieldType.string,
            values: ['WARN boooo'],
            labels: {
              foo: 'bar',
              baz: '1',
              level: 'dbug',
            },
          },
          {
            name: 'id',
            type: FieldType.string,
            values: ['0'],
          },
        ],
      }),
      toDataFrame({
        name: 'logs',
        fields: [
          {
            name: 'time',
            type: FieldType.time,
            values: ['1970-01-01T00:00:00Z', '1970-01-01T00:00:02Z'],
          },
          {
            name: 'message',
            type: FieldType.string,
            values: ['INFO 1', 'INFO 2'],
            labels: {
              foo: 'bar',
              baz: '2',
              level: 'err',
            },
          },
          {
            name: 'id',
            type: FieldType.string,
            values: ['1', '2'],
          },
        ],
      }),
    ];
    const logsModel = dataFrameToLogsModel(series, 1);
    expect(logsModel.hasUniqueLabels).toBeTruthy();
    expect(logsModel.rows).toHaveLength(3);
    expect(logsModel.rows).toMatchObject([
      {
        entry: 'INFO 1',
        labels: { foo: 'bar', baz: '2' },
        logLevel: LogLevel.error,
        uniqueLabels: { baz: '2' },
      },
      {
        entry: 'WARN boooo',
        labels: { foo: 'bar', baz: '1' },
        logLevel: LogLevel.debug,
        uniqueLabels: { baz: '1' },
      },
      {
        entry: 'INFO 2',
        labels: { foo: 'bar', baz: '2' },
        logLevel: LogLevel.error,
        uniqueLabels: { baz: '2' },
      },
    ]);

    expect(logsModel.series).toHaveLength(2);
    expect(logsModel.series).toMatchObject([
      {
        name: 'error',
        fields: [
          { type: 'time', values: new ArrayVector([0, 1000, 2000]) },
          { type: 'number', values: new ArrayVector([1, 0, 1]) },
        ],
      },
      {
        name: 'debug',
        fields: [
          { type: 'time', values: new ArrayVector([1000, 2000]) },
          { type: 'number', values: new ArrayVector([1, 0]) },
        ],
      },
    ]);
    expect(logsModel.meta).toHaveLength(1);
    expect(logsModel.meta![0]).toMatchObject({
      label: COMMON_LABELS,
      value: {
        foo: 'bar',
      },
      kind: LogsMetaKind.LabelsMap,
    });
  });

  it('given multiple series with equal times should return expected logs model', () => {
    const series: DataFrame[] = [
      toDataFrame({
        fields: [
          {
            name: 'ts',
            type: FieldType.time,
            values: ['1970-01-01T00:00:00Z'],
          },
          {
            name: 'line',
            type: FieldType.string,
            values: ['WARN boooo 1'],
            labels: {
              foo: 'bar',
              baz: '1',
              level: 'dbug',
            },
          },
          {
            name: 'id',
            type: FieldType.string,
            values: ['0'],
          },
        ],
      }),
      toDataFrame({
        fields: [
          {
            name: 'ts',
            type: FieldType.time,
            values: ['1970-01-01T00:00:01Z'],
          },
          {
            name: 'line',
            type: FieldType.string,
            values: ['WARN boooo 2'],
            labels: {
              foo: 'bar',
              baz: '2',
              level: 'dbug',
            },
          },
          {
            name: 'id',
            type: FieldType.string,
            values: ['1'],
          },
        ],
      }),
      toDataFrame({
        name: 'logs',
        fields: [
          {
            name: 'time',
            type: FieldType.time,
            values: ['1970-01-01T00:00:00Z', '1970-01-01T00:00:01Z'],
          },
          {
            name: 'message',
            type: FieldType.string,
            values: ['INFO 1', 'INFO 2'],
            labels: {
              foo: 'bar',
              baz: '2',
              level: 'err',
            },
          },
          {
            name: 'id',
            type: FieldType.string,
            values: ['2', '3'],
          },
        ],
      }),
    ];
    const logsModel = dataFrameToLogsModel(series, 1);
    expect(logsModel.hasUniqueLabels).toBeTruthy();
    expect(logsModel.rows).toHaveLength(4);
    expect(logsModel.rows).toMatchObject([
      {
        entry: 'WARN boooo 1',
        labels: { foo: 'bar', baz: '1' },
        logLevel: LogLevel.debug,
        uniqueLabels: { baz: '1' },
      },
      {
        entry: 'INFO 1',
        labels: { foo: 'bar', baz: '2' },
        logLevel: LogLevel.error,
        uniqueLabels: { baz: '2' },
      },
      {
        entry: 'WARN boooo 2',
        labels: { foo: 'bar', baz: '2' },
        logLevel: LogLevel.debug,
        uniqueLabels: { baz: '2' },
      },
      {
        entry: 'INFO 2',
        labels: { foo: 'bar', baz: '2' },
        logLevel: LogLevel.error,
        uniqueLabels: { baz: '2' },
      },
    ]);
  });

  it('should return expected line limit meta info when the returned number of series equals the log limit', () => {
    const series: DataFrame[] = [
      new MutableDataFrame({
        fields: [
          {
            name: 'time',
            type: FieldType.time,
            values: ['2019-04-26T09:28:11.352440161Z', '2019-04-26T14:42:50.991981292Z'],
          },
          {
            name: 'message',
            type: FieldType.string,
            values: [
              't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
              't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
            ],
            labels: {
              filename: '/var/log/grafana/grafana.log',
              job: 'grafana',
            },
          },
          {
            name: 'id',
            type: FieldType.string,
            values: ['foo', 'bar'],
          },
        ],
        meta: {
          limit: 2,
        },
      }),
    ];
    const logsModel = dataFrameToLogsModel(series, 1, { from: 1556270591353, to: 1556289770991 });

    expect(logsModel.meta).toHaveLength(2);
    expect(logsModel.meta![0]).toMatchObject({
      label: COMMON_LABELS,
      value: series[0].fields[1].labels,
      kind: LogsMetaKind.LabelsMap,
    });
    expect(logsModel.meta![1]).toMatchObject({
      label: LIMIT_LABEL,
      value: `2 reached, received logs cover 98.44% (5h 14min 40sec) of your selected time range (5h 19min 40sec)`,
      kind: LogsMetaKind.String,
    });
  });

  it('should fallback to row index if no id', () => {
    const series: DataFrame[] = [
      toDataFrame({
        labels: { foo: 'bar' },
        fields: [
          {
            name: 'ts',
            type: FieldType.time,
            values: ['1970-01-01T00:00:00Z'],
          },
          {
            name: 'line',
            type: FieldType.string,
            values: ['WARN boooo 1'],
          },
        ],
      }),
    ];
    const logsModel = dataFrameToLogsModel(series, 1);
    expect(logsModel.rows[0].uid).toBe('0');
  });
});
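
// logSeriesToLogsModel() is the lower-level helper behind dataFrameToLogsModel();
// these cases focus on meta handling for empty or partially empty frames.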

describe('logSeriesToLogsModel', () => {
  it('should return correct metaData even if the data is empty', () => {
    const logSeries: DataFrame[] = [
      {
        fields: [],
        length: 0,
        refId: 'A',
        meta: {
          searchWords: ['test'],
          limit: 1000,
          stats: [{ displayName: 'Summary: total bytes processed', value: 97048, unit: 'decbytes' }],
          custom: { lokiQueryStatKey: 'Summary: total bytes processed' },
          preferredVisualisationType: 'logs',
        },
      },
    ];
    const metaData = {
      hasUniqueLabels: false,
      meta: [
        { label: LIMIT_LABEL, value: 1000, kind: 0 },
        { label: 'Total bytes processed', value: '97.0 kB', kind: 1 },
      ],
      rows: [],
    };
    expect(logSeriesToLogsModel(logSeries)).toMatchObject(metaData);
  });

  it('should return correct metaData when some data frames have empty fields', () => {
    const logSeries: DataFrame[] = [
      toDataFrame({
        fields: [
          {
            name: 'ts',
            type: FieldType.time,
            values: ['1970-01-01T00:00:01Z', '1970-02-01T00:00:01Z', '1970-03-01T00:00:01Z'],
          },
          {
            name: 'line',
            type: FieldType.string,
            values: ['WARN boooo 0', 'WARN boooo 1', 'WARN boooo 2'],
            labels: {
              foo: 'bar',
              level: 'dbug',
            },
          },
          {
            name: 'id',
            type: FieldType.string,
            values: ['0', '1', '2'],
          },
        ],
        refId: 'A',
        meta: {
          searchWords: ['test'],
          limit: 1000,
          stats: [{ displayName: 'Summary: total bytes processed', value: 97048, unit: 'decbytes' }],
          custom: { lokiQueryStatKey: 'Summary: total bytes processed' },
          preferredVisualisationType: 'logs',
        },
      }),
      toDataFrame({
        fields: [],
        length: 0,
        refId: 'B',
        meta: {
          searchWords: ['test'],
          limit: 1000,
          stats: [{ displayName: 'Summary: total bytes processed', value: 97048, unit: 'decbytes' }],
          custom: { lokiQueryStatKey: 'Summary: total bytes processed' },
          preferredVisualisationType: 'logs',
        },
      }),
    ];

    const logsModel = dataFrameToLogsModel(logSeries, 0);
    expect(logsModel.meta).toMatchObject([
      { kind: 2, label: COMMON_LABELS, value: { foo: 'bar', level: 'dbug' } },
      { kind: 0, label: LIMIT_LABEL, value: 2000 },
      { kind: 1, label: 'Total bytes processed', value: '194 kB' },
    ]);
    expect(logsModel.rows).toHaveLength(3);
    expect(logsModel.rows).toMatchObject([
      {
        entry: 'WARN boooo 0',
        labels: { foo: 'bar' },
        logLevel: LogLevel.debug,
      },
      {
        entry: 'WARN boooo 1',
        labels: { foo: 'bar' },
        logLevel: LogLevel.debug,
      },
      {
        entry: 'WARN boooo 2',
        labels: { foo: 'bar' },
        logLevel: LogLevel.debug,
      },
    ]);
  });

  it('should return empty string if message field is undefined', () => {
    const logSeries: DataFrame[] = [
      toDataFrame({
        fields: [
          {
            name: 'ts',
            type: FieldType.time,
            values: ['1970-01-01T00:00:01Z', '1970-02-01T00:00:01Z', '1970-03-01T00:00:01Z'],
          },
          {
            name: 'line',
            type: FieldType.string,
            values: ['WARN boooo 0', undefined, 'WARN boooo 2'],
            labels: {
              foo: 'bar',
              level: 'dbug',
            },
          },
          {
            name: 'id',
            type: FieldType.string,
            values: ['0', '1', '2'],
          },
        ],
        refId: 'A',
        meta: {},
      }),
    ];

    const logsModel = dataFrameToLogsModel(logSeries, 0);
    expect(logsModel.rows).toHaveLength(3);
    expect(logsModel.rows).toMatchObject([
      {
        entry: 'WARN boooo 0',
        labels: { foo: 'bar' },
        logLevel: LogLevel.debug,
      },
      {
        entry: '',
        labels: { foo: 'bar' },
        logLevel: LogLevel.debug,
      },
      {
        entry: 'WARN boooo 2',
        labels: { foo: 'bar' },
        logLevel: LogLevel.debug,
      },
    ]);
  });

  it('should correctly get the log level if the message has ANSI color', () => {
    const logSeries: DataFrame[] = [
      toDataFrame({
        fields: [
          {
            name: 'ts',
            type: FieldType.time,
            values: ['1970-01-01T00:00:01Z'],
          },
          {
            name: 'line',
            type: FieldType.string,
            values: ['Line with ANSI \u001B[31mwarn\u001B[0m et dolor'],
          },
          {
            name: 'id',
            type: FieldType.string,
            values: ['0'],
          },
        ],
        refId: 'A',
        meta: {},
      }),
    ];

    const logsModel = dataFrameToLogsModel(logSeries, 0);
    expect(logsModel.rows).toHaveLength(1);
    expect(logsModel.rows[0].logLevel).toEqual(LogLevel.warn);
  });
});
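
// getSeriesProperties() derives the histogram bucket size and the visible time
// range for the logs volume graph from the rows and the requested range.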

describe('getSeriesProperties()', () => {
  it('sets a minimum bucket size', () => {
    const result = getSeriesProperties([], 2, undefined, 3, 123);
    expect(result.bucketSize).toBe(123);
  });

  it('does not adjust the bucketSize if there is no range', () => {
    const result = getSeriesProperties([], 30, undefined, 70);
    expect(result.bucketSize).toBe(2100);
  });

  it('does not adjust the bucketSize if the logs row times match the given range', () => {
    const rows: LogRowModel[] = [
      { entry: 'foo', timeEpochMs: 10 },
      { entry: 'bar', timeEpochMs: 20 },
    ] as any;
    const range = { from: 10, to: 20 };
    const result = getSeriesProperties(rows, 1, range, 2, 1);
    expect(result.bucketSize).toBe(2);
    expect(result.visibleRange).toMatchObject(range);
  });

  it('clamps the range and adjusts the bucketSize if the logs row times do not completely cover the given range', () => {
    const rows: LogRowModel[] = [
      { entry: 'foo', timeEpochMs: 10 },
      { entry: 'bar', timeEpochMs: 20 },
    ] as any;
    const range = { from: 0, to: 30 };
    const result = getSeriesProperties(rows, 3, range, 2, 1);
    // Bucket size 6 is shortened to 4 because the new visible range (20ms) is smaller than the original range (30ms)
    expect(result.bucketSize).toBe(4);
    // The 'from' time is also aligned to the bucket size (divisible by 4)
    expect(result.visibleRange).toMatchObject({ from: 8, to: 30 });
  });
});
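
// queryLogsVolume() issues one volume query per target and merges the returned
// frames per log level (via the provided extractLevel callback), emitting a
// Loading state first and then Done or Error.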

describe('logs volume', () => {
  class TestDataQuery implements DataQuery {
    refId = 'a';
    target = '';
  }

  let volumeProvider: Observable<DataQueryResponse>,
    datasource: MockObservableDataSourceApi,
    request: DataQueryRequest<TestDataQuery>;

  function createFrame(labels: object, timestamps: number[], values: number[]) {
    return toDataFrame({
      fields: [
        { name: 'Time', type: FieldType.time, values: timestamps },
        {
          name: 'Number',
          type: FieldType.number,
          values,
          labels,
        },
      ],
    });
  }

  function createExpectedFields(levelName: string, timestamps: number[], values: number[]) {
    return [
      { name: 'Time', values: { buffer: timestamps } },
      {
        name: 'Value',
        config: { displayNameFromDS: levelName },
        values: { buffer: values },
      },
    ];
  }

  function setup(datasourceSetup: () => void) {
    datasourceSetup();
    request = {
      targets: [{ target: 'volume query 1' }, { target: 'volume query 2' }],
      scopedVars: {},
    } as unknown as DataQueryRequest<TestDataQuery>;
    volumeProvider = queryLogsVolume(datasource, request, {
      extractLevel: (dataFrame: DataFrame) => {
        return dataFrame.fields[1]!.labels!.level === 'error' ? LogLevel.error : LogLevel.unknown;
      },
      range: {
        from: dateTimeParse('2021-06-17 00:00:00', { timeZone: 'utc' }),
        to: dateTimeParse('2021-06-17 00:00:00', { timeZone: 'utc' }),
        raw: { from: '0', to: '1' },
      },
      targets: request.targets,
    });
  }

  function setupMultipleResults() {
    // level=unknown
    const resultAFrame1 = createFrame({ app: 'app01' }, [100, 200, 300], [5, 5, 5]);
    // level=error
    const resultAFrame2 = createFrame({ app: 'app01', level: 'error' }, [100, 200, 300], [0, 1, 0]);
    // level=unknown
    const resultBFrame1 = createFrame({ app: 'app02' }, [100, 200, 300], [1, 2, 3]);
    // level=error
    const resultBFrame2 = createFrame({ app: 'app02', level: 'error' }, [100, 200, 300], [1, 1, 1]);

    datasource = new MockObservableDataSourceApi('loki', [
      {
        data: [resultAFrame1, resultAFrame2],
      },
      {
        data: [resultBFrame1, resultBFrame2],
      },
    ]);
  }

  function setupErrorResponse() {
    datasource = new MockObservableDataSourceApi('loki', [], undefined, 'Error message');
  }

  it('aggregates data frames by level', async () => {
    setup(setupMultipleResults);

    await expect(volumeProvider).toEmitValuesWith((received) => {
      expect(received).toMatchObject([
        { state: LoadingState.Loading, error: undefined, data: [] },
        {
          state: LoadingState.Done,
          error: undefined,
          data: [
            {
              fields: createExpectedFields('unknown', [100, 200, 300], [6, 7, 8]),
            },
            {
              fields: createExpectedFields('error', [100, 200, 300], [1, 2, 1]),
            },
          ],
        },
      ]);
    });
  });

  it('returns error', async () => {
    setup(setupErrorResponse);

    await expect(volumeProvider).toEmitValuesWith((received) => {
      expect(received).toMatchObject([
        { state: LoadingState.Loading, error: undefined, data: [] },
        {
          state: LoadingState.Error,
          error: 'Error message',
          data: [],
        },
        'Error message',
      ]);
    });
  });
});