import { Observable } from 'rxjs';

import {
  ArrayVector,
  DataFrame,
  DataQuery,
  DataQueryRequest,
  DataQueryResponse,
  dateTimeParse,
  FieldType,
  LoadingState,
  LogLevel,
  LogRowModel,
  LogsDedupStrategy,
  LogsMetaKind,
  MutableDataFrame,
  toDataFrame,
} from '@grafana/data';

import { MockObservableDataSourceApi } from '../../test/mocks/datasource_srv';

import {
  COMMON_LABELS,
  dataFrameToLogsModel,
  dedupLogRows,
  filterLogLevels,
  getSeriesProperties,
  LIMIT_LABEL,
  logSeriesToLogsModel,
  queryLogsVolume,
} from './logs_model';
- describe('dedupLogRows()', () => {
- test('should return rows as is when dedup is set to none', () => {
- const rows: LogRowModel[] = [
- {
- entry: 'WARN test 1.23 on [xxx]',
- },
- {
- entry: 'WARN test 1.23 on [xxx]',
- },
- ] as any;
- expect(dedupLogRows(rows, LogsDedupStrategy.none)).toMatchObject(rows);
- });
- test('should dedup on exact matches', () => {
- const rows: LogRowModel[] = [
- {
- entry: 'WARN test 1.23 on [xxx]',
- },
- {
- entry: 'WARN test 1.23 on [xxx]',
- },
- {
- entry: 'INFO test 2.44 on [xxx]',
- },
- {
- entry: 'WARN test 1.23 on [xxx]',
- },
- ] as any;
- expect(dedupLogRows(rows, LogsDedupStrategy.exact)).toEqual([
- {
- duplicates: 1,
- entry: 'WARN test 1.23 on [xxx]',
- },
- {
- duplicates: 0,
- entry: 'INFO test 2.44 on [xxx]',
- },
- {
- duplicates: 0,
- entry: 'WARN test 1.23 on [xxx]',
- },
- ]);
- });
- test('should dedup on number matches', () => {
- const rows: LogRowModel[] = [
- {
- entry: 'WARN test 1.2323423 on [xxx]',
- },
- {
- entry: 'WARN test 1.23 on [xxx]',
- },
- {
- entry: 'INFO test 2.44 on [xxx]',
- },
- {
- entry: 'WARN test 1.23 on [xxx]',
- },
- ] as any;
- expect(dedupLogRows(rows, LogsDedupStrategy.numbers)).toEqual([
- {
- duplicates: 1,
- entry: 'WARN test 1.2323423 on [xxx]',
- },
- {
- duplicates: 0,
- entry: 'INFO test 2.44 on [xxx]',
- },
- {
- duplicates: 0,
- entry: 'WARN test 1.23 on [xxx]',
- },
- ]);
- });
- test('should dedup on signature matches', () => {
- const rows: LogRowModel[] = [
- {
- entry: 'WARN test 1.2323423 on [xxx]',
- },
- {
- entry: 'WARN test 1.23 on [xxx]',
- },
- {
- entry: 'INFO test 2.44 on [xxx]',
- },
- {
- entry: 'WARN test 1.23 on [xxx]',
- },
- ] as any;
- expect(dedupLogRows(rows, LogsDedupStrategy.signature)).toEqual([
- {
- duplicates: 3,
- entry: 'WARN test 1.2323423 on [xxx]',
- },
- ]);
- });
- test('should return to non-deduped state on same log result', () => {
- const rows: LogRowModel[] = [
- {
- entry: 'INFO 123',
- },
- {
- entry: 'WARN 123',
- },
- {
- entry: 'WARN 123',
- },
- ] as any;
- expect(dedupLogRows(rows, LogsDedupStrategy.exact)).toEqual([
- {
- duplicates: 0,
- entry: 'INFO 123',
- },
- {
- duplicates: 1,
- entry: 'WARN 123',
- },
- ]);
- expect(dedupLogRows(rows, LogsDedupStrategy.none)).toEqual(rows);
- });
- });
- describe('filterLogLevels()', () => {
- test('should correctly filter out log levels', () => {
- const rows: LogRowModel[] = [
- {
- entry: 'DEBUG 1',
- logLevel: LogLevel.debug,
- },
- {
- entry: 'ERROR 1',
- logLevel: LogLevel.error,
- },
- {
- entry: 'TRACE 1',
- logLevel: LogLevel.trace,
- },
- ] as any;
- const filteredLogs = filterLogLevels(rows, new Set([LogLevel.debug]));
- expect(filteredLogs.length).toBe(2);
- expect(filteredLogs).toEqual([
- { entry: 'ERROR 1', logLevel: 'error' },
- { entry: 'TRACE 1', logLevel: 'trace' },
- ]);
- });
- test('should correctly filter out log levels and then deduplicate', () => {
- const rows: LogRowModel[] = [
- {
- entry: 'DEBUG 1',
- logLevel: LogLevel.debug,
- },
- {
- entry: 'DEBUG 2',
- logLevel: LogLevel.debug,
- },
- {
- entry: 'DEBUG 2',
- logLevel: LogLevel.debug,
- },
- {
- entry: 'ERROR 1',
- logLevel: LogLevel.error,
- },
- {
- entry: 'TRACE 1',
- logLevel: LogLevel.trace,
- },
- ] as any;
- const filteredLogs = filterLogLevels(rows, new Set([LogLevel.error]));
- const deduplicatedLogs = dedupLogRows(filteredLogs, LogsDedupStrategy.exact);
- expect(deduplicatedLogs.length).toBe(3);
- expect(deduplicatedLogs).toEqual([
- { duplicates: 0, entry: 'DEBUG 1', logLevel: 'debug' },
- { duplicates: 1, entry: 'DEBUG 2', logLevel: 'debug' },
- { duplicates: 0, entry: 'TRACE 1', logLevel: 'trace' },
- ]);
- });
- });
- const emptyLogsModel: any = {
- hasUniqueLabels: false,
- rows: [],
- meta: [],
- series: [],
- };
- describe('dataFrameToLogsModel', () => {
- it('given empty series should return empty logs model', () => {
- expect(dataFrameToLogsModel([] as DataFrame[], 0)).toMatchObject(emptyLogsModel);
- });
- it('given series without correct series name should return empty logs model', () => {
- const series: DataFrame[] = [
- toDataFrame({
- fields: [],
- }),
- ];
- expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
- });
- it('given series without a time field should return empty logs model', () => {
- const series: DataFrame[] = [
- new MutableDataFrame({
- fields: [
- {
- name: 'message',
- type: FieldType.string,
- values: [],
- },
- ],
- }),
- ];
- expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
- });
- it('given series without a string field should return empty logs model', () => {
- const series: DataFrame[] = [
- new MutableDataFrame({
- fields: [
- {
- name: 'time',
- type: FieldType.time,
- values: [],
- },
- ],
- }),
- ];
- expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
- });
- it('given one series should return expected logs model', () => {
- const series: DataFrame[] = [
- new MutableDataFrame({
- fields: [
- {
- name: 'time',
- type: FieldType.time,
- values: ['2019-04-26T09:28:11.352440161Z', '2019-04-26T14:42:50.991981292Z'],
- },
- {
- name: 'message',
- type: FieldType.string,
- values: [
- 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
- 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
- ],
- labels: {
- filename: '/var/log/grafana/grafana.log',
- job: 'grafana',
- },
- },
- {
- name: 'id',
- type: FieldType.string,
- values: ['foo', 'bar'],
- },
- ],
- meta: {
- limit: 1000,
- },
- }),
- ];
- const logsModel = dataFrameToLogsModel(series, 1);
- expect(logsModel.hasUniqueLabels).toBeFalsy();
- expect(logsModel.rows).toHaveLength(2);
- expect(logsModel.rows).toMatchObject([
- {
- entry: 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
- labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
- logLevel: 'info',
- uniqueLabels: {},
- uid: 'foo',
- },
- {
- entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
- labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
- logLevel: 'error',
- uniqueLabels: {},
- uid: 'bar',
- },
- ]);
- expect(logsModel.series).toHaveLength(2);
- expect(logsModel.series).toMatchObject([
- {
- name: 'info',
- fields: [
- { type: 'time', values: new ArrayVector([1556270891000, 1556289770000]) },
- { type: 'number', values: new ArrayVector([1, 0]) },
- ],
- },
- {
- name: 'error',
- fields: [
- { type: 'time', values: new ArrayVector([1556289770000]) },
- { type: 'number', values: new ArrayVector([1]) },
- ],
- },
- ]);
- expect(logsModel.meta).toHaveLength(2);
- expect(logsModel.meta![0]).toMatchObject({
- label: COMMON_LABELS,
- value: {
- filename: '/var/log/grafana/grafana.log',
- job: 'grafana',
- },
- kind: LogsMetaKind.LabelsMap,
- });
- expect(logsModel.meta![1]).toMatchObject({
- label: LIMIT_LABEL,
- value: `1000 (2 returned)`,
- kind: LogsMetaKind.String,
- });
- });
- it('given one series with labels-field should return expected logs model', () => {
- const series: DataFrame[] = [
- new MutableDataFrame({
- fields: [
- {
- name: 'labels',
- type: FieldType.other,
- values: [
- {
- filename: '/var/log/grafana/grafana.log',
- job: 'grafana',
- },
- {
- filename: '/var/log/grafana/grafana.log',
- job: 'grafana',
- },
- ],
- },
- {
- name: 'time',
- type: FieldType.time,
- values: ['2019-04-26T09:28:11.352440161Z', '2019-04-26T14:42:50.991981292Z'],
- },
- {
- name: 'message',
- type: FieldType.string,
- values: [
- 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
- 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
- ],
- },
- {
- name: 'id',
- type: FieldType.string,
- values: ['foo', 'bar'],
- },
- ],
- meta: {
- limit: 1000,
- custom: {
- frameType: 'LabeledTimeValues',
- },
- },
- }),
- ];
- const logsModel = dataFrameToLogsModel(series, 1);
- expect(logsModel.hasUniqueLabels).toBeFalsy();
- expect(logsModel.rows).toHaveLength(2);
- expect(logsModel.rows).toMatchObject([
- {
- entry: 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
- labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
- logLevel: 'info',
- uniqueLabels: {},
- uid: 'foo',
- },
- {
- entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
- labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
- logLevel: 'error',
- uniqueLabels: {},
- uid: 'bar',
- },
- ]);
- expect(logsModel.series).toHaveLength(2);
- expect(logsModel.series).toMatchObject([
- {
- name: 'info',
- fields: [
- { type: 'time', values: new ArrayVector([1556270891000, 1556289770000]) },
- { type: 'number', values: new ArrayVector([1, 0]) },
- ],
- },
- {
- name: 'error',
- fields: [
- { type: 'time', values: new ArrayVector([1556289770000]) },
- { type: 'number', values: new ArrayVector([1]) },
- ],
- },
- ]);
- expect(logsModel.meta).toHaveLength(2);
- expect(logsModel.meta![0]).toMatchObject({
- label: COMMON_LABELS,
- value: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
- kind: LogsMetaKind.LabelsMap,
- });
- expect(logsModel.meta![1]).toMatchObject({
- label: LIMIT_LABEL,
- value: `1000 (2 returned)`,
- kind: LogsMetaKind.String,
- });
- });
- it('given one series with labels-field it should work regardless the label-fields position', () => {
- const labels = {
- name: 'labels',
- type: FieldType.other,
- values: [
- {
- node: 'first',
- mode: 'slow',
- },
- ],
- };
- const time = {
- name: 'time',
- type: FieldType.time,
- values: ['2019-04-26T09:28:11.352440161Z'],
- };
- const line = {
- name: 'line',
- type: FieldType.string,
- values: ['line1'],
- };
- const meta = {
- custom: {
- frameType: 'LabeledTimeValues',
- },
- };
- const frame1 = new MutableDataFrame({
- meta,
- fields: [labels, time, line],
- });
- const frame2 = new MutableDataFrame({
- meta,
- fields: [time, labels, line],
- });
- const frame3 = new MutableDataFrame({
- meta,
- fields: [time, line, labels],
- });
- const logsModel1 = dataFrameToLogsModel([frame1], 1);
- expect(logsModel1.rows).toHaveLength(1);
- expect(logsModel1.rows[0].labels).toStrictEqual({ mode: 'slow', node: 'first' });
- const logsModel2 = dataFrameToLogsModel([frame2], 1);
- expect(logsModel2.rows).toHaveLength(1);
- expect(logsModel2.rows[0].labels).toStrictEqual({ mode: 'slow', node: 'first' });
- const logsModel3 = dataFrameToLogsModel([frame3], 1);
- expect(logsModel3.rows).toHaveLength(1);
- expect(logsModel3.rows[0].labels).toStrictEqual({ mode: 'slow', node: 'first' });
- });
- it('given one series with error should return expected logs model', () => {
- const series: DataFrame[] = [
- new MutableDataFrame({
- fields: [
- {
- name: 'time',
- type: FieldType.time,
- values: ['2019-04-26T09:28:11.352440161Z', '2019-04-26T14:42:50.991981292Z'],
- },
- {
- name: 'message',
- type: FieldType.string,
- values: [
- 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
- 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
- ],
- labels: {
- filename: '/var/log/grafana/grafana.log',
- job: 'grafana',
- __error__: 'Failed while parsing',
- },
- },
- {
- name: 'id',
- type: FieldType.string,
- values: ['foo', 'bar'],
- },
- ],
- meta: {
- limit: 1000,
- custom: {
- error: 'Error when parsing some of the logs',
- },
- },
- }),
- ];
- const logsModel = dataFrameToLogsModel(series, 1);
- expect(logsModel.hasUniqueLabels).toBeFalsy();
- expect(logsModel.rows).toHaveLength(2);
- expect(logsModel.rows).toMatchObject([
- {
- entry: 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
- labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana', __error__: 'Failed while parsing' },
- logLevel: 'info',
- uniqueLabels: {},
- uid: 'foo',
- },
- {
- entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
- labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana', __error__: 'Failed while parsing' },
- logLevel: 'error',
- uniqueLabels: {},
- uid: 'bar',
- },
- ]);
- expect(logsModel.series).toHaveLength(2);
- expect(logsModel.meta).toHaveLength(3);
- expect(logsModel.meta![0]).toMatchObject({
- label: COMMON_LABELS,
- value: series[0].fields[1].labels,
- kind: LogsMetaKind.LabelsMap,
- });
- expect(logsModel.meta![1]).toMatchObject({
- label: LIMIT_LABEL,
- value: `1000 (2 returned)`,
- kind: LogsMetaKind.String,
- });
- expect(logsModel.meta![2]).toMatchObject({
- label: '',
- value: 'Error when parsing some of the logs',
- kind: LogsMetaKind.Error,
- });
- });
- it('given one series without labels should return expected logs model', () => {
- const series: DataFrame[] = [
- new MutableDataFrame({
- fields: [
- {
- name: 'time',
- type: FieldType.time,
- values: ['1970-01-01T00:00:01Z'],
- },
- {
- name: 'message',
- type: FieldType.string,
- values: ['WARN boooo'],
- },
- {
- name: 'level',
- type: FieldType.string,
- values: ['dbug'],
- },
- ],
- }),
- ];
- const logsModel = dataFrameToLogsModel(series, 1);
- expect(logsModel.rows).toHaveLength(1);
- expect(logsModel.rows).toMatchObject([
- {
- entry: 'WARN boooo',
- labels: {},
- logLevel: LogLevel.debug,
- uniqueLabels: {},
- },
- ]);
- });
- it('given multiple series with unique times should return expected logs model', () => {
- const series: DataFrame[] = [
- toDataFrame({
- fields: [
- {
- name: 'ts',
- type: FieldType.time,
- values: ['1970-01-01T00:00:01Z'],
- },
- {
- name: 'line',
- type: FieldType.string,
- values: ['WARN boooo'],
- labels: {
- foo: 'bar',
- baz: '1',
- level: 'dbug',
- },
- },
- {
- name: 'id',
- type: FieldType.string,
- values: ['0'],
- },
- ],
- }),
- toDataFrame({
- name: 'logs',
- fields: [
- {
- name: 'time',
- type: FieldType.time,
- values: ['1970-01-01T00:00:00Z', '1970-01-01T00:00:02Z'],
- },
- {
- name: 'message',
- type: FieldType.string,
- values: ['INFO 1', 'INFO 2'],
- labels: {
- foo: 'bar',
- baz: '2',
- level: 'err',
- },
- },
- {
- name: 'id',
- type: FieldType.string,
- values: ['1', '2'],
- },
- ],
- }),
- ];
- const logsModel = dataFrameToLogsModel(series, 1);
- expect(logsModel.hasUniqueLabels).toBeTruthy();
- expect(logsModel.rows).toHaveLength(3);
- expect(logsModel.rows).toMatchObject([
- {
- entry: 'INFO 1',
- labels: { foo: 'bar', baz: '2' },
- logLevel: LogLevel.error,
- uniqueLabels: { baz: '2' },
- },
- {
- entry: 'WARN boooo',
- labels: { foo: 'bar', baz: '1' },
- logLevel: LogLevel.debug,
- uniqueLabels: { baz: '1' },
- },
- {
- entry: 'INFO 2',
- labels: { foo: 'bar', baz: '2' },
- logLevel: LogLevel.error,
- uniqueLabels: { baz: '2' },
- },
- ]);
- expect(logsModel.series).toHaveLength(2);
- expect(logsModel.series).toMatchObject([
- {
- name: 'error',
- fields: [
- { type: 'time', values: new ArrayVector([0, 1000, 2000]) },
- { type: 'number', values: new ArrayVector([1, 0, 1]) },
- ],
- },
- {
- name: 'debug',
- fields: [
- { type: 'time', values: new ArrayVector([1000, 2000]) },
- { type: 'number', values: new ArrayVector([1, 0]) },
- ],
- },
- ]);
- expect(logsModel.meta).toHaveLength(1);
- expect(logsModel.meta![0]).toMatchObject({
- label: COMMON_LABELS,
- value: {
- foo: 'bar',
- },
- kind: LogsMetaKind.LabelsMap,
- });
- });
- it('given multiple series with equal times should return expected logs model', () => {
- const series: DataFrame[] = [
- toDataFrame({
- fields: [
- {
- name: 'ts',
- type: FieldType.time,
- values: ['1970-01-01T00:00:00Z'],
- },
- {
- name: 'line',
- type: FieldType.string,
- values: ['WARN boooo 1'],
- labels: {
- foo: 'bar',
- baz: '1',
- level: 'dbug',
- },
- },
- {
- name: 'id',
- type: FieldType.string,
- values: ['0'],
- },
- ],
- }),
- toDataFrame({
- fields: [
- {
- name: 'ts',
- type: FieldType.time,
- values: ['1970-01-01T00:00:01Z'],
- },
- {
- name: 'line',
- type: FieldType.string,
- values: ['WARN boooo 2'],
- labels: {
- foo: 'bar',
- baz: '2',
- level: 'dbug',
- },
- },
- {
- name: 'id',
- type: FieldType.string,
- values: ['1'],
- },
- ],
- }),
- toDataFrame({
- name: 'logs',
- fields: [
- {
- name: 'time',
- type: FieldType.time,
- values: ['1970-01-01T00:00:00Z', '1970-01-01T00:00:01Z'],
- },
- {
- name: 'message',
- type: FieldType.string,
- values: ['INFO 1', 'INFO 2'],
- labels: {
- foo: 'bar',
- baz: '2',
- level: 'err',
- },
- },
- {
- name: 'id',
- type: FieldType.string,
- values: ['2', '3'],
- },
- ],
- }),
- ];
- const logsModel = dataFrameToLogsModel(series, 1);
- expect(logsModel.hasUniqueLabels).toBeTruthy();
- expect(logsModel.rows).toHaveLength(4);
- expect(logsModel.rows).toMatchObject([
- {
- entry: 'WARN boooo 1',
- labels: { foo: 'bar', baz: '1' },
- logLevel: LogLevel.debug,
- uniqueLabels: { baz: '1' },
- },
- {
- entry: 'INFO 1',
- labels: { foo: 'bar', baz: '2' },
- logLevel: LogLevel.error,
- uniqueLabels: { baz: '2' },
- },
- {
- entry: 'WARN boooo 2',
- labels: { foo: 'bar', baz: '2' },
- logLevel: LogLevel.debug,
- uniqueLabels: { baz: '2' },
- },
- {
- entry: 'INFO 2',
- labels: { foo: 'bar', baz: '2' },
- logLevel: LogLevel.error,
- uniqueLabels: { baz: '2' },
- },
- ]);
- });
- it('should return expected line limit meta info when returned number of series equal the log limit', () => {
- const series: DataFrame[] = [
- new MutableDataFrame({
- fields: [
- {
- name: 'time',
- type: FieldType.time,
- values: ['2019-04-26T09:28:11.352440161Z', '2019-04-26T14:42:50.991981292Z'],
- },
- {
- name: 'message',
- type: FieldType.string,
- values: [
- 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
- 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
- ],
- labels: {
- filename: '/var/log/grafana/grafana.log',
- job: 'grafana',
- },
- },
- {
- name: 'id',
- type: FieldType.string,
- values: ['foo', 'bar'],
- },
- ],
- meta: {
- limit: 2,
- },
- }),
- ];
- const logsModel = dataFrameToLogsModel(series, 1, { from: 1556270591353, to: 1556289770991 });
- expect(logsModel.meta).toHaveLength(2);
- expect(logsModel.meta![0]).toMatchObject({
- label: COMMON_LABELS,
- value: series[0].fields[1].labels,
- kind: LogsMetaKind.LabelsMap,
- });
- expect(logsModel.meta![1]).toMatchObject({
- label: LIMIT_LABEL,
- value: `2 reached, received logs cover 98.44% (5h 14min 40sec) of your selected time range (5h 19min 40sec)`,
- kind: LogsMetaKind.String,
- });
- });
- it('should fallback to row index if no id', () => {
- const series: DataFrame[] = [
- toDataFrame({
- labels: { foo: 'bar' },
- fields: [
- {
- name: 'ts',
- type: FieldType.time,
- values: ['1970-01-01T00:00:00Z'],
- },
- {
- name: 'line',
- type: FieldType.string,
- values: ['WARN boooo 1'],
- },
- ],
- }),
- ];
- const logsModel = dataFrameToLogsModel(series, 1);
- expect(logsModel.rows[0].uid).toBe('0');
- });
- });
- describe('logSeriesToLogsModel', () => {
- it('should return correct metaData even if the data is empty', () => {
- const logSeries: DataFrame[] = [
- {
- fields: [],
- length: 0,
- refId: 'A',
- meta: {
- searchWords: ['test'],
- limit: 1000,
- stats: [{ displayName: 'Summary: total bytes processed', value: 97048, unit: 'decbytes' }],
- custom: { lokiQueryStatKey: 'Summary: total bytes processed' },
- preferredVisualisationType: 'logs',
- },
- },
- ];
- const metaData = {
- hasUniqueLabels: false,
- meta: [
- { label: LIMIT_LABEL, value: 1000, kind: 0 },
- { label: 'Total bytes processed', value: '97.0 kB', kind: 1 },
- ],
- rows: [],
- };
- expect(logSeriesToLogsModel(logSeries)).toMatchObject(metaData);
- });
- it('should return correct metaData when some data frames have empty fields', () => {
- const logSeries: DataFrame[] = [
- toDataFrame({
- fields: [
- {
- name: 'ts',
- type: FieldType.time,
- values: ['1970-01-01T00:00:01Z', '1970-02-01T00:00:01Z', '1970-03-01T00:00:01Z'],
- },
- {
- name: 'line',
- type: FieldType.string,
- values: ['WARN boooo 0', 'WARN boooo 1', 'WARN boooo 2'],
- labels: {
- foo: 'bar',
- level: 'dbug',
- },
- },
- {
- name: 'id',
- type: FieldType.string,
- values: ['0', '1', '2'],
- },
- ],
- refId: 'A',
- meta: {
- searchWords: ['test'],
- limit: 1000,
- stats: [{ displayName: 'Summary: total bytes processed', value: 97048, unit: 'decbytes' }],
- custom: { lokiQueryStatKey: 'Summary: total bytes processed' },
- preferredVisualisationType: 'logs',
- },
- }),
- toDataFrame({
- fields: [],
- length: 0,
- refId: 'B',
- meta: {
- searchWords: ['test'],
- limit: 1000,
- stats: [{ displayName: 'Summary: total bytes processed', value: 97048, unit: 'decbytes' }],
- custom: { lokiQueryStatKey: 'Summary: total bytes processed' },
- preferredVisualisationType: 'logs',
- },
- }),
- ];
- const logsModel = dataFrameToLogsModel(logSeries, 0);
- expect(logsModel.meta).toMatchObject([
- { kind: 2, label: COMMON_LABELS, value: { foo: 'bar', level: 'dbug' } },
- { kind: 0, label: LIMIT_LABEL, value: 2000 },
- { kind: 1, label: 'Total bytes processed', value: '194 kB' },
- ]);
- expect(logsModel.rows).toHaveLength(3);
- expect(logsModel.rows).toMatchObject([
- {
- entry: 'WARN boooo 0',
- labels: { foo: 'bar' },
- logLevel: LogLevel.debug,
- },
- {
- entry: 'WARN boooo 1',
- labels: { foo: 'bar' },
- logLevel: LogLevel.debug,
- },
- {
- entry: 'WARN boooo 2',
- labels: { foo: 'bar' },
- logLevel: LogLevel.debug,
- },
- ]);
- });
- it('should return empty string if message field is undefined', () => {
- const logSeries: DataFrame[] = [
- toDataFrame({
- fields: [
- {
- name: 'ts',
- type: FieldType.time,
- values: ['1970-01-01T00:00:01Z', '1970-02-01T00:00:01Z', '1970-03-01T00:00:01Z'],
- },
- {
- name: 'line',
- type: FieldType.string,
- values: ['WARN boooo 0', undefined, 'WARN boooo 2'],
- labels: {
- foo: 'bar',
- level: 'dbug',
- },
- },
- {
- name: 'id',
- type: FieldType.string,
- values: ['0', '1', '2'],
- },
- ],
- refId: 'A',
- meta: {},
- }),
- ];
- const logsModel = dataFrameToLogsModel(logSeries, 0);
- expect(logsModel.rows).toHaveLength(3);
- expect(logsModel.rows).toMatchObject([
- {
- entry: 'WARN boooo 0',
- labels: { foo: 'bar' },
- logLevel: LogLevel.debug,
- },
- {
- entry: '',
- labels: { foo: 'bar' },
- logLevel: LogLevel.debug,
- },
- {
- entry: 'WARN boooo 2',
- labels: { foo: 'bar' },
- logLevel: LogLevel.debug,
- },
- ]);
- });
- it('should correctly get the log level if the message has ANSI color', () => {
- const logSeries: DataFrame[] = [
- toDataFrame({
- fields: [
- {
- name: 'ts',
- type: FieldType.time,
- values: ['1970-01-01T00:00:01Z'],
- },
- {
- name: 'line',
- type: FieldType.string,
- values: ['Line with ANSI \u001B[31mwarn\u001B[0m et dolor'],
- },
- {
- name: 'id',
- type: FieldType.string,
- values: ['0'],
- },
- ],
- refId: 'A',
- meta: {},
- }),
- ];
- const logsModel = dataFrameToLogsModel(logSeries, 0);
- expect(logsModel.rows).toHaveLength(1);
- expect(logsModel.rows[0].logLevel).toEqual(LogLevel.warn);
- });
- });
- describe('getSeriesProperties()', () => {
- it('sets a minimum bucket size', () => {
- const result = getSeriesProperties([], 2, undefined, 3, 123);
- expect(result.bucketSize).toBe(123);
- });
- it('does not adjust the bucketSize if there is no range', () => {
- const result = getSeriesProperties([], 30, undefined, 70);
- expect(result.bucketSize).toBe(2100);
- });
- it('does not adjust the bucketSize if the logs row times match the given range', () => {
- const rows: LogRowModel[] = [
- { entry: 'foo', timeEpochMs: 10 },
- { entry: 'bar', timeEpochMs: 20 },
- ] as any;
- const range = { from: 10, to: 20 };
- const result = getSeriesProperties(rows, 1, range, 2, 1);
- expect(result.bucketSize).toBe(2);
- expect(result.visibleRange).toMatchObject(range);
- });
- it('clamps the range and adjusts the bucketSize if the logs row times do not completely cover the given range', () => {
- const rows: LogRowModel[] = [
- { entry: 'foo', timeEpochMs: 10 },
- { entry: 'bar', timeEpochMs: 20 },
- ] as any;
- const range = { from: 0, to: 30 };
- const result = getSeriesProperties(rows, 3, range, 2, 1);
- // Bucketsize 6 gets shortened to 4 because of new visible range is 20ms vs original range being 30ms
- expect(result.bucketSize).toBe(4);
- // From time is also aligned to bucketSize (divisible by 4)
- expect(result.visibleRange).toMatchObject({ from: 8, to: 30 });
- });
- });
- describe('logs volume', () => {
- class TestDataQuery implements DataQuery {
- refId = 'a';
- target = '';
- }
- let volumeProvider: Observable<DataQueryResponse>,
- datasource: MockObservableDataSourceApi,
- request: DataQueryRequest<TestDataQuery>;
- function createFrame(labels: object, timestamps: number[], values: number[]) {
- return toDataFrame({
- fields: [
- { name: 'Time', type: FieldType.time, values: timestamps },
- {
- name: 'Number',
- type: FieldType.number,
- values,
- labels,
- },
- ],
- });
- }
- function createExpectedFields(levelName: string, timestamps: number[], values: number[]) {
- return [
- { name: 'Time', values: { buffer: timestamps } },
- {
- name: 'Value',
- config: { displayNameFromDS: levelName },
- values: { buffer: values },
- },
- ];
- }
- function setup(datasourceSetup: () => void) {
- datasourceSetup();
- request = {
- targets: [{ target: 'volume query 1' }, { target: 'volume query 2' }],
- scopedVars: {},
- } as unknown as DataQueryRequest<TestDataQuery>;
- volumeProvider = queryLogsVolume(datasource, request, {
- extractLevel: (dataFrame: DataFrame) => {
- return dataFrame.fields[1]!.labels!.level === 'error' ? LogLevel.error : LogLevel.unknown;
- },
- range: {
- from: dateTimeParse('2021-06-17 00:00:00', { timeZone: 'utc' }),
- to: dateTimeParse('2021-06-17 00:00:00', { timeZone: 'utc' }),
- raw: { from: '0', to: '1' },
- },
- targets: request.targets,
- });
- }
- function setupMultipleResults() {
- // level=unknown
- const resultAFrame1 = createFrame({ app: 'app01' }, [100, 200, 300], [5, 5, 5]);
- // level=error
- const resultAFrame2 = createFrame({ app: 'app01', level: 'error' }, [100, 200, 300], [0, 1, 0]);
- // level=unknown
- const resultBFrame1 = createFrame({ app: 'app02' }, [100, 200, 300], [1, 2, 3]);
- // level=error
- const resultBFrame2 = createFrame({ app: 'app02', level: 'error' }, [100, 200, 300], [1, 1, 1]);
- datasource = new MockObservableDataSourceApi('loki', [
- {
- data: [resultAFrame1, resultAFrame2],
- },
- {
- data: [resultBFrame1, resultBFrame2],
- },
- ]);
- }
- function setupErrorResponse() {
- datasource = new MockObservableDataSourceApi('loki', [], undefined, 'Error message');
- }
- it('aggregates data frames by level', async () => {
- setup(setupMultipleResults);
- await expect(volumeProvider).toEmitValuesWith((received) => {
- expect(received).toMatchObject([
- { state: LoadingState.Loading, error: undefined, data: [] },
- {
- state: LoadingState.Done,
- error: undefined,
- data: [
- {
- fields: createExpectedFields('unknown', [100, 200, 300], [6, 7, 8]),
- },
- {
- fields: createExpectedFields('error', [100, 200, 300], [1, 2, 1]),
- },
- ],
- },
- ]);
- });
- });
- it('returns error', async () => {
- setup(setupErrorResponse);
- await expect(volumeProvider).toEmitValuesWith((received) => {
- expect(received).toMatchObject([
- { state: LoadingState.Loading, error: undefined, data: [] },
- {
- state: LoadingState.Error,
- error: 'Error message',
- data: [],
- },
- 'Error message',
- ]);
- });
- });
- });