elastic_response.ts 24 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789
  1. import { clone, filter, find, identity, isArray, keys, map, uniq, values as _values } from 'lodash';
  2. import {
  3. DataQueryResponse,
  4. DataFrame,
  5. toDataFrame,
  6. FieldType,
  7. MutableDataFrame,
  8. PreferredVisualisationType,
  9. } from '@grafana/data';
  10. import TableModel from 'app/core/table_model';
  11. import flatten from 'app/core/utils/flatten';
  12. import {
  13. ExtendedStatMetaType,
  14. isMetricAggregationWithField,
  15. TopMetrics,
  16. } from './components/QueryEditor/MetricAggregationsEditor/aggregations';
  17. import { metricAggregationConfig } from './components/QueryEditor/MetricAggregationsEditor/utils';
  18. import * as queryDef from './query_def';
  19. import { ElasticsearchAggregation, ElasticsearchQuery } from './types';
  20. import { describeMetric, getScriptValue } from './utils';
// Regex source for one highlighted fragment: captures the text between the
// pre/post highlight tags that query_def asks Elasticsearch to wrap matches in.
const HIGHLIGHT_TAGS_EXP = `${queryDef.highlightTags.pre}([^@]+)${queryDef.highlightTags.post}`;

// Map of metric field name -> numeric value inside a single top_metrics hit.
type TopMetricMetric = Record<string, number>;

// Shape of one top_metrics aggregation bucket as returned by Elasticsearch.
interface TopMetricBucket {
  top: Array<{
    metrics: TopMetricMetric;
  }>;
}
  28. export class ElasticResponse {
  29. constructor(private targets: ElasticsearchQuery[], private response: any) {
  30. this.targets = targets;
  31. this.response = response;
  32. }
  33. processMetrics(esAgg: any, target: ElasticsearchQuery, seriesList: any, props: any) {
  34. let newSeries: any;
  35. for (let y = 0; y < target.metrics!.length; y++) {
  36. const metric = target.metrics![y];
  37. if (metric.hide) {
  38. continue;
  39. }
  40. switch (metric.type) {
  41. case 'count': {
  42. newSeries = { datapoints: [], metric: 'count', props, refId: target.refId };
  43. for (let i = 0; i < esAgg.buckets.length; i++) {
  44. const bucket = esAgg.buckets[i];
  45. const value = bucket.doc_count;
  46. newSeries.datapoints.push([value, bucket.key]);
  47. }
  48. seriesList.push(newSeries);
  49. break;
  50. }
  51. case 'percentiles': {
  52. if (esAgg.buckets.length === 0) {
  53. break;
  54. }
  55. const firstBucket = esAgg.buckets[0];
  56. const percentiles = firstBucket[metric.id].values;
  57. for (const percentileName in percentiles) {
  58. newSeries = {
  59. datapoints: [],
  60. metric: 'p' + percentileName,
  61. props: props,
  62. field: metric.field,
  63. refId: target.refId,
  64. };
  65. for (let i = 0; i < esAgg.buckets.length; i++) {
  66. const bucket = esAgg.buckets[i];
  67. const values = bucket[metric.id].values;
  68. newSeries.datapoints.push([values[percentileName], bucket.key]);
  69. }
  70. seriesList.push(newSeries);
  71. }
  72. break;
  73. }
  74. case 'extended_stats': {
  75. for (const statName in metric.meta) {
  76. if (!metric.meta[statName as ExtendedStatMetaType]) {
  77. continue;
  78. }
  79. newSeries = {
  80. datapoints: [],
  81. metric: statName,
  82. props: props,
  83. field: metric.field,
  84. refId: target.refId,
  85. };
  86. for (let i = 0; i < esAgg.buckets.length; i++) {
  87. const bucket = esAgg.buckets[i];
  88. const stats = bucket[metric.id];
  89. // add stats that are in nested obj to top level obj
  90. stats.std_deviation_bounds_upper = stats.std_deviation_bounds.upper;
  91. stats.std_deviation_bounds_lower = stats.std_deviation_bounds.lower;
  92. newSeries.datapoints.push([stats[statName], bucket.key]);
  93. }
  94. seriesList.push(newSeries);
  95. }
  96. break;
  97. }
  98. case 'top_metrics': {
  99. if (metric.settings?.metrics?.length) {
  100. for (const metricField of metric.settings?.metrics) {
  101. newSeries = {
  102. datapoints: [],
  103. metric: metric.type,
  104. props: props,
  105. refId: target.refId,
  106. field: metricField,
  107. };
  108. for (let i = 0; i < esAgg.buckets.length; i++) {
  109. const bucket = esAgg.buckets[i];
  110. const stats = bucket[metric.id] as TopMetricBucket;
  111. const values = stats.top.map((hit) => {
  112. if (hit.metrics[metricField]) {
  113. return hit.metrics[metricField];
  114. }
  115. return null;
  116. });
  117. const point = [values[values.length - 1], bucket.key];
  118. newSeries.datapoints.push(point);
  119. }
  120. seriesList.push(newSeries);
  121. }
  122. }
  123. break;
  124. }
  125. default: {
  126. newSeries = {
  127. datapoints: [],
  128. metric: metric.type,
  129. metricId: metric.id,
  130. props: props,
  131. refId: target.refId,
  132. };
  133. if (isMetricAggregationWithField(metric)) {
  134. newSeries.field = metric.field;
  135. }
  136. for (let i = 0; i < esAgg.buckets.length; i++) {
  137. const bucket = esAgg.buckets[i];
  138. const value = bucket[metric.id];
  139. if (value !== undefined) {
  140. if (value.normalized_value) {
  141. newSeries.datapoints.push([value.normalized_value, bucket.key]);
  142. } else {
  143. newSeries.datapoints.push([value.value, bucket.key]);
  144. }
  145. }
  146. }
  147. seriesList.push(newSeries);
  148. break;
  149. }
  150. }
  151. }
  152. }
  153. processAggregationDocs(
  154. esAgg: any,
  155. aggDef: ElasticsearchAggregation,
  156. target: ElasticsearchQuery,
  157. table: any,
  158. props: any
  159. ) {
  160. // add columns
  161. if (table.columns.length === 0) {
  162. for (const propKey of keys(props)) {
  163. table.addColumn({ text: propKey, filterable: true });
  164. }
  165. table.addColumn({ text: aggDef.field, filterable: true });
  166. }
  167. // helper func to add values to value array
  168. const addMetricValue = (values: any[], metricName: string, value: any) => {
  169. table.addColumn({ text: metricName });
  170. values.push(value);
  171. };
  172. const buckets = isArray(esAgg.buckets) ? esAgg.buckets : [esAgg.buckets];
  173. for (const bucket of buckets) {
  174. const values = [];
  175. for (const propValues of _values(props)) {
  176. values.push(propValues);
  177. }
  178. // add bucket key (value)
  179. values.push(bucket.key);
  180. for (const metric of target.metrics || []) {
  181. switch (metric.type) {
  182. case 'count': {
  183. addMetricValue(values, this.getMetricName(metric.type), bucket.doc_count);
  184. break;
  185. }
  186. case 'extended_stats': {
  187. for (const statName in metric.meta) {
  188. if (!metric.meta[statName as ExtendedStatMetaType]) {
  189. continue;
  190. }
  191. const stats = bucket[metric.id];
  192. // add stats that are in nested obj to top level obj
  193. stats.std_deviation_bounds_upper = stats.std_deviation_bounds.upper;
  194. stats.std_deviation_bounds_lower = stats.std_deviation_bounds.lower;
  195. addMetricValue(values, this.getMetricName(statName as ExtendedStatMetaType), stats[statName]);
  196. }
  197. break;
  198. }
  199. case 'percentiles': {
  200. const percentiles = bucket[metric.id].values;
  201. for (const percentileName in percentiles) {
  202. addMetricValue(values, `p${percentileName} ${metric.field}`, percentiles[percentileName]);
  203. }
  204. break;
  205. }
  206. case 'top_metrics': {
  207. const baseName = this.getMetricName(metric.type);
  208. if (metric.settings?.metrics) {
  209. for (const metricField of metric.settings.metrics) {
  210. // If we selected more than one metric we also add each metric name
  211. const metricName = metric.settings.metrics.length > 1 ? `${baseName} ${metricField}` : baseName;
  212. const stats = bucket[metric.id] as TopMetricBucket;
  213. // Size of top_metrics is fixed to 1.
  214. addMetricValue(values, metricName, stats.top[0].metrics[metricField]);
  215. }
  216. }
  217. break;
  218. }
  219. default: {
  220. let metricName = this.getMetricName(metric.type);
  221. const otherMetrics = filter(target.metrics, { type: metric.type });
  222. // if more of the same metric type include field field name in property
  223. if (otherMetrics.length > 1) {
  224. if (isMetricAggregationWithField(metric)) {
  225. metricName += ' ' + metric.field;
  226. }
  227. if (metric.type === 'bucket_script') {
  228. //Use the formula in the column name
  229. metricName = getScriptValue(metric);
  230. }
  231. }
  232. addMetricValue(values, metricName, bucket[metric.id].value);
  233. break;
  234. }
  235. }
  236. }
  237. table.rows.push(values);
  238. }
  239. }
  240. // This is quite complex
  241. // need to recurse down the nested buckets to build series
  242. processBuckets(aggs: any, target: ElasticsearchQuery, seriesList: any, table: TableModel, props: any, depth: number) {
  243. let bucket, aggDef: any, esAgg, aggId;
  244. const maxDepth = target.bucketAggs!.length - 1;
  245. for (aggId in aggs) {
  246. aggDef = find(target.bucketAggs, { id: aggId });
  247. esAgg = aggs[aggId];
  248. if (!aggDef) {
  249. continue;
  250. }
  251. if (depth === maxDepth) {
  252. if (aggDef.type === 'date_histogram') {
  253. this.processMetrics(esAgg, target, seriesList, props);
  254. } else {
  255. this.processAggregationDocs(esAgg, aggDef, target, table, props);
  256. }
  257. } else {
  258. for (const nameIndex in esAgg.buckets) {
  259. bucket = esAgg.buckets[nameIndex];
  260. props = clone(props);
  261. if (bucket.key !== void 0) {
  262. props[aggDef.field] = bucket.key;
  263. } else {
  264. props['filter'] = nameIndex;
  265. }
  266. if (bucket.key_as_string) {
  267. props[aggDef.field] = bucket.key_as_string;
  268. }
  269. this.processBuckets(bucket, target, seriesList, table, props, depth + 1);
  270. }
  271. }
  272. }
  273. }
  274. private getMetricName(metric: string): string {
  275. const metricDef = Object.entries(metricAggregationConfig)
  276. .filter(([key]) => key === metric)
  277. .map(([_, value]) => value)[0];
  278. if (metricDef) {
  279. return metricDef.label;
  280. }
  281. const extendedStat = queryDef.extendedStats.find((e) => e.value === metric);
  282. if (extendedStat) {
  283. return extendedStat.label;
  284. }
  285. return metric;
  286. }
  287. private getSeriesName(series: any, target: ElasticsearchQuery, dedup: boolean) {
  288. let metricName = this.getMetricName(series.metric);
  289. if (target.alias) {
  290. const regex = /\{\{([\s\S]+?)\}\}/g;
  291. return target.alias.replace(regex, (match: any, g1: any, g2: any) => {
  292. const group = g1 || g2;
  293. if (group.indexOf('term ') === 0) {
  294. return series.props[group.substring(5)];
  295. }
  296. if (series.props[group] !== void 0) {
  297. return series.props[group];
  298. }
  299. if (group === 'metric') {
  300. return metricName;
  301. }
  302. if (group === 'field') {
  303. return series.field || '';
  304. }
  305. return match;
  306. });
  307. }
  308. if (queryDef.isPipelineAgg(series.metric)) {
  309. if (series.metric && queryDef.isPipelineAggWithMultipleBucketPaths(series.metric)) {
  310. const agg: any = find(target.metrics, { id: series.metricId });
  311. if (agg && agg.settings.script) {
  312. metricName = getScriptValue(agg);
  313. for (const pv of agg.pipelineVariables) {
  314. const appliedAgg: any = find(target.metrics, { id: pv.pipelineAgg });
  315. if (appliedAgg) {
  316. metricName = metricName.replace('params.' + pv.name, describeMetric(appliedAgg));
  317. }
  318. }
  319. } else {
  320. metricName = 'Unset';
  321. }
  322. } else {
  323. const appliedAgg: any = find(target.metrics, { id: series.field });
  324. if (appliedAgg) {
  325. metricName += ' ' + describeMetric(appliedAgg);
  326. } else {
  327. metricName = 'Unset';
  328. }
  329. }
  330. } else if (series.field) {
  331. metricName += ' ' + series.field;
  332. }
  333. const propKeys = keys(series.props);
  334. if (propKeys.length === 0) {
  335. return metricName;
  336. }
  337. let name = '';
  338. for (const propName in series.props) {
  339. name += series.props[propName] + ' ';
  340. }
  341. if (dedup) {
  342. return name.trim() + ' ' + metricName;
  343. }
  344. return name.trim();
  345. }
  346. nameSeries(seriesList: any, target: ElasticsearchQuery) {
  347. const metricTypeCount = uniq(map(seriesList, 'metric')).length;
  348. const hasTopMetricWithMultipleMetrics = (
  349. target.metrics?.filter((m) => m.type === 'top_metrics') as TopMetrics[]
  350. ).some((m) => (m?.settings?.metrics?.length || 0) > 1);
  351. for (let i = 0; i < seriesList.length; i++) {
  352. const series = seriesList[i];
  353. series.target = this.getSeriesName(series, target, metricTypeCount > 1 || hasTopMetricWithMultipleMetrics);
  354. }
  355. }
  356. processHits(hits: { total: { value: any }; hits: any[] }, seriesList: any[], target: ElasticsearchQuery) {
  357. const hitsTotal = typeof hits.total === 'number' ? hits.total : hits.total.value; // <- Works with Elasticsearch 7.0+
  358. const series: any = {
  359. target: target.refId,
  360. type: 'docs',
  361. refId: target.refId,
  362. datapoints: [],
  363. total: hitsTotal,
  364. filterable: true,
  365. };
  366. let propName, hit, doc: any, i;
  367. for (i = 0; i < hits.hits.length; i++) {
  368. hit = hits.hits[i];
  369. doc = {
  370. _id: hit._id,
  371. _type: hit._type,
  372. _index: hit._index,
  373. sort: hit.sort,
  374. highlight: hit.highlight,
  375. };
  376. if (hit._source) {
  377. for (propName in hit._source) {
  378. doc[propName] = hit._source[propName];
  379. }
  380. }
  381. for (propName in hit.fields) {
  382. doc[propName] = hit.fields[propName];
  383. }
  384. series.datapoints.push(doc);
  385. }
  386. seriesList.push(series);
  387. }
  388. trimDatapoints(aggregations: any, target: ElasticsearchQuery) {
  389. const histogram: any = find(target.bucketAggs, { type: 'date_histogram' });
  390. const shouldDropFirstAndLast = histogram && histogram.settings && histogram.settings.trimEdges;
  391. if (shouldDropFirstAndLast) {
  392. const trim = histogram.settings.trimEdges;
  393. for (const prop in aggregations) {
  394. const points = aggregations[prop];
  395. if (points.datapoints.length > trim * 2) {
  396. points.datapoints = points.datapoints.slice(trim, points.datapoints.length - trim);
  397. }
  398. }
  399. }
  400. }
  401. getErrorFromElasticResponse(response: any, err: any) {
  402. const result: any = {};
  403. result.data = JSON.stringify(err, null, 4);
  404. if (err.root_cause && err.root_cause.length > 0 && err.root_cause[0].reason) {
  405. result.message = err.root_cause[0].reason;
  406. } else {
  407. result.message = err.reason || 'Unknown elastic error response';
  408. }
  409. if (response.$$config) {
  410. result.config = response.$$config;
  411. }
  412. return result;
  413. }
  414. getTimeSeries() {
  415. if (this.targets.some((target) => queryDef.hasMetricOfType(target, 'raw_data'))) {
  416. return this.processResponseToDataFrames(false);
  417. }
  418. return this.processResponseToSeries();
  419. }
  420. getLogs(logMessageField?: string, logLevelField?: string): DataQueryResponse {
  421. return this.processResponseToDataFrames(true, logMessageField, logLevelField);
  422. }
  423. private processResponseToDataFrames(
  424. isLogsRequest: boolean,
  425. logMessageField?: string,
  426. logLevelField?: string
  427. ): DataQueryResponse {
  428. const dataFrame: DataFrame[] = [];
  429. for (let n = 0; n < this.response.responses.length; n++) {
  430. const response = this.response.responses[n];
  431. if (response.error) {
  432. throw this.getErrorFromElasticResponse(this.response, response.error);
  433. }
  434. if (response.hits) {
  435. const { propNames, docs } = flattenHits(response.hits.hits);
  436. const series = docs.length
  437. ? createEmptyDataFrame(
  438. propNames.map(toNameTypePair(docs)),
  439. isLogsRequest,
  440. this.targets[0].timeField,
  441. logMessageField,
  442. logLevelField
  443. )
  444. : createEmptyDataFrame([], isLogsRequest);
  445. if (isLogsRequest) {
  446. addPreferredVisualisationType(series, 'logs');
  447. }
  448. // Add a row for each document
  449. for (const doc of docs) {
  450. if (logLevelField) {
  451. // Remap level field based on the datasource config. This field is
  452. // then used in explore to figure out the log level. We may rewrite
  453. // some actual data in the level field if they are different.
  454. doc['level'] = doc[logLevelField];
  455. }
  456. // When highlighting exists, we need to collect all the highlighted
  457. // phrases and add them to the DataFrame's meta.searchWords array.
  458. if (doc.highlight) {
  459. // There might be multiple words so we need two versions of the
  460. // regular expression. One to match gobally, when used with part.match,
  461. // it returns and array of matches. The second one is used to capture the
  462. // values between the tags.
  463. const globalHighlightWordRegex = new RegExp(HIGHLIGHT_TAGS_EXP, 'g');
  464. const highlightWordRegex = new RegExp(HIGHLIGHT_TAGS_EXP);
  465. const newSearchWords = Object.keys(doc.highlight)
  466. .flatMap((key) => {
  467. return doc.highlight[key].flatMap((line: string) => {
  468. const matchedPhrases = line.match(globalHighlightWordRegex);
  469. if (!matchedPhrases) {
  470. return [];
  471. }
  472. return matchedPhrases.map((part) => {
  473. const matches = part.match(highlightWordRegex);
  474. return (matches && matches[1]) || null;
  475. });
  476. });
  477. })
  478. .filter(identity);
  479. // If meta and searchWords already exists, add the words and
  480. // deduplicate otherwise create a new set of search words.
  481. const searchWords = series.meta?.searchWords
  482. ? uniq([...series.meta.searchWords, ...newSearchWords])
  483. : [...newSearchWords];
  484. series.meta = series.meta ? { ...series.meta, searchWords } : { searchWords };
  485. }
  486. series.add(doc);
  487. }
  488. const target = this.targets[n];
  489. series.refId = target.refId;
  490. dataFrame.push(series);
  491. }
  492. if (response.aggregations) {
  493. const aggregations = response.aggregations;
  494. const target = this.targets[n];
  495. const tmpSeriesList: any[] = [];
  496. const table = new TableModel();
  497. this.processBuckets(aggregations, target, tmpSeriesList, table, {}, 0);
  498. this.trimDatapoints(tmpSeriesList, target);
  499. this.nameSeries(tmpSeriesList, target);
  500. if (table.rows.length > 0) {
  501. const series = toDataFrame(table);
  502. series.refId = target.refId;
  503. dataFrame.push(series);
  504. }
  505. for (let y = 0; y < tmpSeriesList.length; y++) {
  506. let series = toDataFrame(tmpSeriesList[y]);
  507. // When log results, show aggregations only in graph. Log fields are then going to be shown in table.
  508. if (isLogsRequest) {
  509. addPreferredVisualisationType(series, 'graph');
  510. }
  511. series.refId = target.refId;
  512. dataFrame.push(series);
  513. }
  514. }
  515. }
  516. return { data: dataFrame };
  517. }
  518. processResponseToSeries = () => {
  519. const seriesList = [];
  520. for (let i = 0; i < this.response.responses.length; i++) {
  521. const response = this.response.responses[i];
  522. const target = this.targets[i];
  523. if (response.error) {
  524. throw this.getErrorFromElasticResponse(this.response, response.error);
  525. }
  526. if (response.hits && response.hits.hits.length > 0) {
  527. this.processHits(response.hits, seriesList, target);
  528. }
  529. if (response.aggregations) {
  530. const aggregations = response.aggregations;
  531. const target = this.targets[i];
  532. const tmpSeriesList: any[] = [];
  533. const table = new TableModel();
  534. table.refId = target.refId;
  535. this.processBuckets(aggregations, target, tmpSeriesList, table, {}, 0);
  536. this.trimDatapoints(tmpSeriesList, target);
  537. this.nameSeries(tmpSeriesList, target);
  538. for (let y = 0; y < tmpSeriesList.length; y++) {
  539. seriesList.push(tmpSeriesList[y]);
  540. }
  541. if (table.rows.length > 0) {
  542. seriesList.push(table);
  543. }
  544. }
  545. }
  546. return { data: seriesList };
  547. };
  548. }
  549. type Doc = {
  550. _id: string;
  551. _type: string;
  552. _index: string;
  553. _source?: any;
  554. sort?: Array<string | number>;
  555. highlight?: Record<string, string[]>;
  556. };
  557. /**
  558. * Flatten the docs from response mainly the _source part which can be nested. This flattens it so that it is one level
  559. * deep and the keys are: `level1Name.level2Name...`. Also returns list of all properties from all the docs (not all
  560. * docs have to have the same keys).
  561. * @param hits
  562. */
  563. const flattenHits = (hits: Doc[]): { docs: Array<Record<string, any>>; propNames: string[] } => {
  564. const docs: any[] = [];
  565. // We keep a list of all props so that we can create all the fields in the dataFrame, this can lead
  566. // to wide sparse dataframes in case the scheme is different per document.
  567. let propNames: string[] = [];
  568. for (const hit of hits) {
  569. const flattened = hit._source ? flatten(hit._source) : {};
  570. const doc = {
  571. _id: hit._id,
  572. _type: hit._type,
  573. _index: hit._index,
  574. sort: hit.sort,
  575. highlight: hit.highlight,
  576. _source: { ...flattened },
  577. ...flattened,
  578. };
  579. for (const propName of Object.keys(doc)) {
  580. if (propNames.indexOf(propName) === -1) {
  581. propNames.push(propName);
  582. }
  583. }
  584. docs.push(doc);
  585. }
  586. propNames.sort();
  587. return { docs, propNames };
  588. };
  589. /**
  590. * Create empty dataframe but with created fields. Fields are based from propNames (should be from the response) and
  591. * also from configuration specified fields for message, time, and level.
  592. * @param propNames
  593. * @param timeField
  594. * @param logMessageField
  595. * @param logLevelField
  596. */
  597. const createEmptyDataFrame = (
  598. props: Array<[string, FieldType]>,
  599. isLogsRequest: boolean,
  600. timeField?: string,
  601. logMessageField?: string,
  602. logLevelField?: string
  603. ): MutableDataFrame => {
  604. const series = new MutableDataFrame({ fields: [] });
  605. if (timeField) {
  606. series.addField({
  607. config: {
  608. filterable: true,
  609. },
  610. name: timeField,
  611. type: FieldType.time,
  612. });
  613. }
  614. if (logMessageField) {
  615. const f = series.addField({
  616. name: logMessageField,
  617. type: FieldType.string,
  618. });
  619. series.setParser(f, (v: any) => {
  620. return v || '';
  621. });
  622. }
  623. if (logLevelField) {
  624. const f = series.addField({
  625. name: 'level',
  626. type: FieldType.string,
  627. });
  628. series.setParser(f, (v: any) => {
  629. return v || '';
  630. });
  631. }
  632. const fieldNames = series.fields.map((field) => field.name);
  633. for (const [name, type] of props) {
  634. // Do not duplicate fields. This can mean that we will shadow some fields.
  635. if (fieldNames.includes(name)) {
  636. continue;
  637. }
  638. // Do not add _source field (besides logs) as we are showing each _source field in table instead.
  639. if (!isLogsRequest && name === '_source') {
  640. continue;
  641. }
  642. const f = series.addField({
  643. config: {
  644. filterable: true,
  645. },
  646. name,
  647. type,
  648. });
  649. series.setParser(f, (v: any) => {
  650. return v || '';
  651. });
  652. }
  653. return series;
  654. };
  655. const addPreferredVisualisationType = (series: any, type: PreferredVisualisationType) => {
  656. let s = series;
  657. s.meta
  658. ? (s.meta.preferredVisualisationType = type)
  659. : (s.meta = {
  660. preferredVisualisationType: type,
  661. });
  662. };
  663. const toNameTypePair =
  664. (docs: Array<Record<string, any>>) =>
  665. (propName: string): [string, FieldType] =>
  666. [propName, guessType(docs.find((doc) => doc[propName] !== undefined)?.[propName])];
  667. /**
  668. * Trying to guess data type from its value. This is far from perfect, as in order to have accurate guess
  669. * we should have access to the elasticsearch mapping, but it covers the most common use cases for numbers, strings & arrays.
  670. */
  671. const guessType = (value: unknown): FieldType => {
  672. switch (typeof value) {
  673. case 'number':
  674. return FieldType.number;
  675. case 'string':
  676. return FieldType.string;
  677. default:
  678. return FieldType.other;
  679. }
  680. };