logs_model.ts 8.8 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323
  1. import _ from 'lodash';
  2. import { colors, getFlotPairs, ansicolor } from '@grafana/ui';
  3. import {
  4. Labels,
  5. LogLevel,
  6. DataFrame,
  7. findCommonLabels,
  8. findUniqueLabels,
  9. getLogLevel,
  10. FieldType,
  11. getLogLevelFromKey,
  12. LogRowModel,
  13. LogsModel,
  14. LogsMetaItem,
  15. LogsMetaKind,
  16. LogsDedupStrategy,
  17. GraphSeriesXY,
  18. dateTime,
  19. toUtc,
  20. NullValueMode,
  21. toDataFrame,
  22. FieldCache,
  23. } from '@grafana/data';
  24. import { getThemeColor } from 'app/core/utils/colors';
  25. import { hasAnsiCodes } from 'app/core/utils/text';
  26. import { sortInAscendingOrder } from 'app/core/utils/explore';
  27. import { getGraphSeriesModel } from 'app/plugins/panel/graph2/getGraphSeriesModel';
// Maps each log level to a fixed slot of the shared Grafana palette so levels
// render with the same color in the logs graph and in log rows.
export const LogLevelColor = {
  [LogLevel.critical]: colors[7],
  [LogLevel.warning]: colors[1],
  [LogLevel.error]: colors[4],
  [LogLevel.info]: colors[0],
  [LogLevel.debug]: colors[5],
  [LogLevel.trace]: colors[2],
  // unknown has no palette slot; use a theme-aware grey (light, dark variants)
  [LogLevel.unknown]: getThemeColor('#8e8e8e', '#dde4ed'),
};
  37. const isoDateRegexp = /\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-6]\d[,\.]\d+([+-][0-2]\d:[0-5]\d|Z)/g;
  38. function isDuplicateRow(row: LogRowModel, other: LogRowModel, strategy: LogsDedupStrategy): boolean {
  39. switch (strategy) {
  40. case LogsDedupStrategy.exact:
  41. // Exact still strips dates
  42. return row.entry.replace(isoDateRegexp, '') === other.entry.replace(isoDateRegexp, '');
  43. case LogsDedupStrategy.numbers:
  44. return row.entry.replace(/\d/g, '') === other.entry.replace(/\d/g, '');
  45. case LogsDedupStrategy.signature:
  46. return row.entry.replace(/\w/g, '') === other.entry.replace(/\w/g, '');
  47. default:
  48. return false;
  49. }
  50. }
  51. export function dedupLogRows(logs: LogsModel, strategy: LogsDedupStrategy): LogsModel {
  52. if (strategy === LogsDedupStrategy.none) {
  53. return logs;
  54. }
  55. const dedupedRows = logs.rows.reduce((result: LogRowModel[], row: LogRowModel, index, list) => {
  56. const rowCopy = { ...row };
  57. const previous = result[result.length - 1];
  58. if (index > 0 && isDuplicateRow(row, previous, strategy)) {
  59. previous.duplicates++;
  60. } else {
  61. rowCopy.duplicates = 0;
  62. result.push(rowCopy);
  63. }
  64. return result;
  65. }, []);
  66. return {
  67. ...logs,
  68. rows: dedupedRows,
  69. };
  70. }
  71. export function filterLogLevels(logs: LogsModel, hiddenLogLevels: Set<LogLevel>): LogsModel {
  72. if (hiddenLogLevels.size === 0) {
  73. return logs;
  74. }
  75. const filteredRows = logs.rows.reduce((result: LogRowModel[], row: LogRowModel, index, list) => {
  76. if (!hiddenLogLevels.has(row.logLevel)) {
  77. result.push(row);
  78. }
  79. return result;
  80. }, []);
  81. return {
  82. ...logs,
  83. rows: filteredRows,
  84. };
  85. }
  86. export function makeSeriesForLogs(rows: LogRowModel[], intervalMs: number): GraphSeriesXY[] {
  87. // currently interval is rangeMs / resolution, which is too low for showing series as bars.
  88. // need at least 10px per bucket, so we multiply interval by 10. Should be solved higher up the chain
  89. // when executing queries & interval calculated and not here but this is a temporary fix.
  90. // intervalMs = intervalMs * 10;
  91. // Graph time series by log level
  92. const seriesByLevel: any = {};
  93. const bucketSize = intervalMs * 10;
  94. const seriesList: any[] = [];
  95. const sortedRows = rows.sort(sortInAscendingOrder);
  96. for (const row of sortedRows) {
  97. let series = seriesByLevel[row.logLevel];
  98. if (!series) {
  99. seriesByLevel[row.logLevel] = series = {
  100. lastTs: null,
  101. datapoints: [],
  102. alias: row.logLevel,
  103. color: LogLevelColor[row.logLevel],
  104. };
  105. seriesList.push(series);
  106. }
  107. // align time to bucket size - used Math.floor for calculation as time of the bucket
  108. // must be in the past (before Date.now()) to be displayed on the graph
  109. const time = Math.floor(row.timeEpochMs / bucketSize) * bucketSize;
  110. // Entry for time
  111. if (time === series.lastTs) {
  112. series.datapoints[series.datapoints.length - 1][0]++;
  113. } else {
  114. series.datapoints.push([1, time]);
  115. series.lastTs = time;
  116. }
  117. // add zero to other levels to aid stacking so each level series has same number of points
  118. for (const other of seriesList) {
  119. if (other !== series && other.lastTs !== time) {
  120. other.datapoints.push([0, time]);
  121. other.lastTs = time;
  122. }
  123. }
  124. }
  125. return seriesList.map(series => {
  126. series.datapoints.sort((a: number[], b: number[]) => {
  127. return a[1] - b[1];
  128. });
  129. // EEEP: converts GraphSeriesXY to DataFrame and back again!
  130. const data = toDataFrame(series);
  131. const points = getFlotPairs({
  132. xField: data.fields[1],
  133. yField: data.fields[0],
  134. nullValueMode: NullValueMode.Null,
  135. });
  136. const graphSeries: GraphSeriesXY = {
  137. color: series.color,
  138. label: series.alias,
  139. data: points,
  140. isVisible: true,
  141. yAxis: {
  142. index: 1,
  143. min: 0,
  144. tickDecimals: 0,
  145. },
  146. };
  147. return graphSeries;
  148. });
  149. }
  150. function isLogsData(series: DataFrame) {
  151. return series.fields.some(f => f.type === FieldType.time) && series.fields.some(f => f.type === FieldType.string);
  152. }
  153. export function dataFrameToLogsModel(dataFrame: DataFrame[], intervalMs: number): LogsModel {
  154. const metricSeries: DataFrame[] = [];
  155. const logSeries: DataFrame[] = [];
  156. for (const series of dataFrame) {
  157. if (isLogsData(series)) {
  158. logSeries.push(series);
  159. continue;
  160. }
  161. metricSeries.push(series);
  162. }
  163. const logsModel = logSeriesToLogsModel(logSeries);
  164. if (logsModel) {
  165. if (metricSeries.length === 0) {
  166. logsModel.series = makeSeriesForLogs(logsModel.rows, intervalMs);
  167. } else {
  168. logsModel.series = getGraphSeriesModel(
  169. metricSeries,
  170. {},
  171. { showBars: true, showLines: false, showPoints: false },
  172. {
  173. asTable: false,
  174. isVisible: true,
  175. placement: 'under',
  176. }
  177. );
  178. }
  179. return logsModel;
  180. }
  181. return {
  182. hasUniqueLabels: false,
  183. rows: [],
  184. meta: [],
  185. series: [],
  186. };
  187. }
  188. export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
  189. if (logSeries.length === 0) {
  190. return undefined;
  191. }
  192. const allLabels: Labels[] = [];
  193. for (let n = 0; n < logSeries.length; n++) {
  194. const series = logSeries[n];
  195. if (series.labels) {
  196. allLabels.push(series.labels);
  197. }
  198. }
  199. let commonLabels: Labels = {};
  200. if (allLabels.length > 0) {
  201. commonLabels = findCommonLabels(allLabels);
  202. }
  203. const rows: LogRowModel[] = [];
  204. let hasUniqueLabels = false;
  205. for (let i = 0; i < logSeries.length; i++) {
  206. const series = logSeries[i];
  207. const fieldCache = new FieldCache(series);
  208. const uniqueLabels = findUniqueLabels(series.labels, commonLabels);
  209. if (Object.keys(uniqueLabels).length > 0) {
  210. hasUniqueLabels = true;
  211. }
  212. const timeField = fieldCache.getFirstFieldOfType(FieldType.time);
  213. const stringField = fieldCache.getFirstFieldOfType(FieldType.string);
  214. const logLevelField = fieldCache.getFieldByName('level');
  215. let seriesLogLevel: LogLevel | undefined = undefined;
  216. if (series.labels && Object.keys(series.labels).indexOf('level') !== -1) {
  217. seriesLogLevel = getLogLevelFromKey(series.labels['level']);
  218. }
  219. for (let j = 0; j < series.length; j++) {
  220. const ts = timeField.values.get(j);
  221. const time = dateTime(ts);
  222. const timeEpochMs = time.valueOf();
  223. const timeFromNow = time.fromNow();
  224. const timeLocal = time.format('YYYY-MM-DD HH:mm:ss');
  225. const timeUtc = toUtc(ts).format('YYYY-MM-DD HH:mm:ss');
  226. let message = stringField.values.get(j);
  227. // This should be string but sometimes isn't (eg elastic) because the dataFrame is not strongly typed.
  228. message = typeof message === 'string' ? message : JSON.stringify(message);
  229. let logLevel = LogLevel.unknown;
  230. if (logLevelField) {
  231. logLevel = getLogLevelFromKey(logLevelField.values.get(j));
  232. } else if (seriesLogLevel) {
  233. logLevel = seriesLogLevel;
  234. } else {
  235. logLevel = getLogLevel(message);
  236. }
  237. const hasAnsi = hasAnsiCodes(message);
  238. const searchWords = series.meta && series.meta.searchWords ? series.meta.searchWords : [];
  239. rows.push({
  240. logLevel,
  241. timeFromNow,
  242. timeEpochMs,
  243. timeLocal,
  244. timeUtc,
  245. uniqueLabels,
  246. hasAnsi,
  247. searchWords,
  248. entry: hasAnsi ? ansicolor.strip(message) : message,
  249. raw: message,
  250. labels: series.labels,
  251. timestamp: ts,
  252. });
  253. }
  254. }
  255. // Meta data to display in status
  256. const meta: LogsMetaItem[] = [];
  257. if (_.size(commonLabels) > 0) {
  258. meta.push({
  259. label: 'Common labels',
  260. value: commonLabels,
  261. kind: LogsMetaKind.LabelsMap,
  262. });
  263. }
  264. const limits = logSeries.filter(series => series.meta && series.meta.limit);
  265. if (limits.length > 0) {
  266. meta.push({
  267. label: 'Limit',
  268. value: `${limits[0].meta.limit} (${rows.length} returned)`,
  269. kind: LogsMetaKind.String,
  270. });
  271. }
  272. return {
  273. hasUniqueLabels,
  274. meta,
  275. rows,
  276. };
  277. }