logs_model.ts 12 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421
  1. import _ from 'lodash';
  2. import ansicolor from 'vendor/ansicolor/ansicolor';
  3. import { colors, getFlotPairs } from '@grafana/ui';
  4. import {
  5. Labels,
  6. LogLevel,
  7. DataFrame,
  8. findCommonLabels,
  9. findUniqueLabels,
  10. getLogLevel,
  11. FieldCache,
  12. FieldType,
  13. getLogLevelFromKey,
  14. LogRowModel,
  15. LogsModel,
  16. LogsMetaItem,
  17. LogsMetaKind,
  18. LogsParser,
  19. LogLabelStatsModel,
  20. LogsDedupStrategy,
  21. GraphSeriesXY,
  22. LoadingState,
  23. dateTime,
  24. toUtc,
  25. NullValueMode,
  26. } from '@grafana/data';
  27. import { getThemeColor } from 'app/core/utils/colors';
  28. import { hasAnsiCodes } from 'app/core/utils/text';
  29. import { getGraphSeriesModel } from 'app/plugins/panel/graph2/getGraphSeriesModel';
// Maps each log level to a display color. Most levels index into the shared
// @grafana/ui palette; 'unknown' uses a theme-aware gray via
// getThemeColor(darkThemeColor, lightThemeColor).
export const LogLevelColor = {
  [LogLevel.critical]: colors[7],
  [LogLevel.warning]: colors[1],
  [LogLevel.error]: colors[4],
  [LogLevel.info]: colors[0],
  [LogLevel.debug]: colors[5],
  [LogLevel.trace]: colors[2],
  [LogLevel.unknown]: getThemeColor('#8e8e8e', '#dde4ed'),
};
// Human-readable description for each de-duplication strategy; presumably
// shown as UI help text for the dedup selector (keys mirror LogsDedupStrategy).
export enum LogsDedupDescription {
  none = 'No de-duplication',
  exact = 'De-duplication of successive lines that are identical, ignoring ISO datetimes.',
  numbers = 'De-duplication of successive lines that are identical when ignoring numbers, e.g., IP addresses, latencies.',
  signature = 'De-duplication of successive lines that have identical punctuation and whitespace.',
}
  45. const LOGFMT_REGEXP = /(?:^|\s)(\w+)=("[^"]*"|\S+)/;
// Built-in log-line parsers keyed by format name. Each implements the
// LogsParser contract: detect a format (test), extract the raw field strings
// from a line (getFields), and build matchers / extract labels and values for
// a single field.
export const LogsParsers: { [name: string]: LogsParser } = {
  JSON: {
    // Matches `"label": value` inside a JSON object; group 1 is the value with
    // surrounding quotes stripped for strings/numbers.
    buildMatcher: label => new RegExp(`(?:{|,)\\s*"${label}"\\s*:\\s*"?([\\d\\.]+|[^"]*)"?`),
    getFields: line => {
      const fields: string[] = [];
      try {
        const parsed = JSON.parse(line);
        _.map(parsed, (value, key) => {
          // Re-locate each parsed key/value pair in the raw line so the
          // returned field strings preserve the original formatting.
          const fieldMatcher = new RegExp(`"${key}"\\s*:\\s*"?${_.escapeRegExp(JSON.stringify(value))}"?`);
          const match = line.match(fieldMatcher);
          if (match) {
            fields.push(match[0]);
          }
        });
      } catch {}
      // Lines that are not valid JSON yield no fields.
      return fields;
    },
    getLabelFromField: field => (field.match(/^"(\w+)"\s*:/) || [])[1],
    getValueFromField: field => (field.match(/:\s*(.*)$/) || [])[1],
    test: line => {
      try {
        // Returns the parsed value (truthy for objects/non-empty values)
        // rather than a strict boolean; returns undefined on parse failure.
        // Callers only use the result truthily.
        return JSON.parse(line);
      } catch (error) {}
    },
  },
  logfmt: {
    buildMatcher: label => new RegExp(`(?:^|\\s)${label}=("[^"]*"|\\S+)`),
    getFields: line => {
      const fields: string[] = [];
      // Fresh global copy of LOGFMT_REGEXP so every pair in the line is
      // visited; replace() is used purely for its iteration side effect.
      line.replace(new RegExp(LOGFMT_REGEXP, 'g'), substring => {
        fields.push(substring.trim());
        return '';
      });
      return fields;
    },
    getLabelFromField: field => (field.match(LOGFMT_REGEXP) || [])[1],
    getValueFromField: field => (field.match(LOGFMT_REGEXP) || [])[2],
    test: line => LOGFMT_REGEXP.test(line),
  },
};
  86. export function calculateFieldStats(rows: LogRowModel[], extractor: RegExp): LogLabelStatsModel[] {
  87. // Consider only rows that satisfy the matcher
  88. const rowsWithField = rows.filter(row => extractor.test(row.entry));
  89. const rowCount = rowsWithField.length;
  90. // Get field value counts for eligible rows
  91. const countsByValue = _.countBy(rowsWithField, row => (row as LogRowModel).entry.match(extractor)[1]);
  92. const sortedCounts = _.chain(countsByValue)
  93. .map((count, value) => ({ count, value, proportion: count / rowCount }))
  94. .sortBy('count')
  95. .reverse()
  96. .value();
  97. return sortedCounts;
  98. }
  99. export function calculateLogsLabelStats(rows: LogRowModel[], label: string): LogLabelStatsModel[] {
  100. // Consider only rows that have the given label
  101. const rowsWithLabel = rows.filter(row => row.labels[label] !== undefined);
  102. const rowCount = rowsWithLabel.length;
  103. // Get label value counts for eligible rows
  104. const countsByValue = _.countBy(rowsWithLabel, row => (row as LogRowModel).labels[label]);
  105. const sortedCounts = _.chain(countsByValue)
  106. .map((count, value) => ({ count, value, proportion: count / rowCount }))
  107. .sortBy('count')
  108. .reverse()
  109. .value();
  110. return sortedCounts;
  111. }
  112. const isoDateRegexp = /\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-6]\d[,\.]\d+([+-][0-2]\d:[0-5]\d|Z)/g;
  113. function isDuplicateRow(row: LogRowModel, other: LogRowModel, strategy: LogsDedupStrategy): boolean {
  114. switch (strategy) {
  115. case LogsDedupStrategy.exact:
  116. // Exact still strips dates
  117. return row.entry.replace(isoDateRegexp, '') === other.entry.replace(isoDateRegexp, '');
  118. case LogsDedupStrategy.numbers:
  119. return row.entry.replace(/\d/g, '') === other.entry.replace(/\d/g, '');
  120. case LogsDedupStrategy.signature:
  121. return row.entry.replace(/\w/g, '') === other.entry.replace(/\w/g, '');
  122. default:
  123. return false;
  124. }
  125. }
  126. export function dedupLogRows(logs: LogsModel, strategy: LogsDedupStrategy): LogsModel {
  127. if (strategy === LogsDedupStrategy.none) {
  128. return logs;
  129. }
  130. const dedupedRows = logs.rows.reduce((result: LogRowModel[], row: LogRowModel, index, list) => {
  131. const rowCopy = { ...row };
  132. const previous = result[result.length - 1];
  133. if (index > 0 && isDuplicateRow(row, previous, strategy)) {
  134. previous.duplicates++;
  135. } else {
  136. rowCopy.duplicates = 0;
  137. result.push(rowCopy);
  138. }
  139. return result;
  140. }, []);
  141. return {
  142. ...logs,
  143. rows: dedupedRows,
  144. };
  145. }
  146. export function getParser(line: string): LogsParser {
  147. let parser;
  148. try {
  149. if (LogsParsers.JSON.test(line)) {
  150. parser = LogsParsers.JSON;
  151. }
  152. } catch (error) {}
  153. if (!parser && LogsParsers.logfmt.test(line)) {
  154. parser = LogsParsers.logfmt;
  155. }
  156. return parser;
  157. }
  158. export function filterLogLevels(logs: LogsModel, hiddenLogLevels: Set<LogLevel>): LogsModel {
  159. if (hiddenLogLevels.size === 0) {
  160. return logs;
  161. }
  162. const filteredRows = logs.rows.reduce((result: LogRowModel[], row: LogRowModel, index, list) => {
  163. if (!hiddenLogLevels.has(row.logLevel)) {
  164. result.push(row);
  165. }
  166. return result;
  167. }, []);
  168. return {
  169. ...logs,
  170. rows: filteredRows,
  171. };
  172. }
// Buckets log rows into one time series per log level (row counts per time
// bucket), zero-filling the other levels so the series stack cleanly as bars.
export function makeSeriesForLogs(rows: LogRowModel[], intervalMs: number): GraphSeriesXY[] {
  // currently interval is rangeMs / resolution, which is too low for showing series as bars.
  // need at least 10px per bucket, so we multiply interval by 10. Should be solved higher up the chain
  // when executing queries & interval calculated and not here but this is a temporary fix.
  // intervalMs = intervalMs * 10;

  // Graph time series by log level
  const seriesByLevel: any = {};
  const bucketSize = intervalMs * 10;
  const seriesList: any[] = [];

  for (const row of rows) {
    let series = seriesByLevel[row.logLevel];
    if (!series) {
      // First row seen for this level: start a new series colored by level.
      seriesByLevel[row.logLevel] = series = {
        lastTs: null,
        datapoints: [],
        alias: row.logLevel,
        color: LogLevelColor[row.logLevel],
      };
      seriesList.push(series);
    }

    // align time to bucket size
    const time = Math.round(row.timeEpochMs / bucketSize) * bucketSize;

    // Entry for time
    if (time === series.lastTs) {
      // Same bucket as this level's previous row: bump its count.
      series.datapoints[series.datapoints.length - 1][0]++;
    } else {
      series.datapoints.push([1, time]);
      series.lastTs = time;
    }

    // add zero to other levels to aid stacking so each level series has same number of points
    for (const other of seriesList) {
      if (other !== series && other.lastTs !== time) {
        other.datapoints.push([0, time]);
        other.lastTs = time;
      }
    }
  }

  return seriesList.map(series => {
    // Datapoints are [count, timestamp]; zero-filling can append points out of
    // order, so sort by timestamp before building flot pairs.
    series.datapoints.sort((a: number[], b: number[]) => {
      return a[1] - b[1];
    });

    const points = getFlotPairs({
      rows: series.datapoints,
      xIndex: 1,
      yIndex: 0,
      nullValueMode: NullValueMode.Null,
    });

    const graphSeries: GraphSeriesXY = {
      color: series.color,
      label: series.alias,
      data: points,
      isVisible: true,
      yAxis: {
        index: 1,
        min: 0,
        tickDecimals: 0,
      },
    };
    return graphSeries;
  });
}
  234. function isLogsData(series: DataFrame) {
  235. return series.fields.some(f => f.type === FieldType.time) && series.fields.some(f => f.type === FieldType.string);
  236. }
  237. export function dataFrameToLogsModel(dataFrame: DataFrame[], intervalMs: number): LogsModel {
  238. const metricSeries: DataFrame[] = [];
  239. const logSeries: DataFrame[] = [];
  240. for (const series of dataFrame) {
  241. if (isLogsData(series)) {
  242. logSeries.push(series);
  243. continue;
  244. }
  245. metricSeries.push(series);
  246. }
  247. const logsModel = logSeriesToLogsModel(logSeries);
  248. if (logsModel) {
  249. if (metricSeries.length === 0) {
  250. logsModel.series = makeSeriesForLogs(logsModel.rows, intervalMs);
  251. } else {
  252. logsModel.series = getGraphSeriesModel(
  253. { series: metricSeries, state: LoadingState.Done },
  254. {},
  255. { showBars: true, showLines: false, showPoints: false },
  256. {
  257. asTable: false,
  258. isVisible: true,
  259. placement: 'under',
  260. }
  261. );
  262. }
  263. return logsModel;
  264. }
  265. return {
  266. hasUniqueLabels: false,
  267. rows: [],
  268. meta: [],
  269. series: [],
  270. };
  271. }
// Flattens log DataFrames into a LogsModel: one LogRowModel per frame row,
// labels common to every frame pulled out into "Common labels" meta, and a
// "Limit" meta item when a frame reports one. Returns undefined when no
// frames are given.
export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
  if (logSeries.length === 0) {
    return undefined;
  }

  // Collect the label set of every frame that has one.
  const allLabels: Labels[] = [];
  for (let n = 0; n < logSeries.length; n++) {
    const series = logSeries[n];
    if (series.labels) {
      allLabels.push(series.labels);
    }
  }

  // Labels shared by all frames are reported once instead of per row.
  let commonLabels: Labels = {};
  if (allLabels.length > 0) {
    commonLabels = findCommonLabels(allLabels);
  }

  const rows: LogRowModel[] = [];
  let hasUniqueLabels = false;
  for (let i = 0; i < logSeries.length; i++) {
    const series = logSeries[i];
    const fieldCache = new FieldCache(series.fields);
    // Labels specific to this frame, i.e. not part of commonLabels.
    const uniqueLabels = findUniqueLabels(series.labels, commonLabels);
    if (Object.keys(uniqueLabels).length > 0) {
      hasUniqueLabels = true;
    }

    for (let j = 0; j < series.rows.length; j++) {
      rows.push(processLogSeriesRow(series, fieldCache, j, uniqueLabels));
    }
  }

  // Meta data to display in status
  const meta: LogsMetaItem[] = [];
  if (_.size(commonLabels) > 0) {
    meta.push({
      label: 'Common labels',
      value: commonLabels,
      kind: LogsMetaKind.LabelsMap,
    });
  }

  // NOTE(review): only the first frame's limit is surfaced — presumably all
  // frames share the same limit; confirm against callers.
  const limits = logSeries.filter(series => series.meta && series.meta.limit);
  if (limits.length > 0) {
    meta.push({
      label: 'Limit',
      value: `${limits[0].meta.limit} (${rows.length} returned)`,
      kind: LogsMetaKind.String,
    });
  }

  return {
    hasUniqueLabels,
    meta,
    rows,
  };
}
// Builds a LogRowModel from row `rowIndex` of a log DataFrame: timestamp from
// the first time field, message from the first string field, log level from
// field/label/heuristic (in that order), and an entry with ANSI escape codes
// stripped while `raw` keeps the original message.
export function processLogSeriesRow(
  series: DataFrame,
  fieldCache: FieldCache,
  rowIndex: number,
  uniqueLabels: Labels
): LogRowModel {
  const row = series.rows[rowIndex];
  // Assumes the frame has at least one time field and one string field
  // (presumably guaranteed upstream by isLogsData); otherwise this throws.
  const timeFieldIndex = fieldCache.getFirstFieldOfType(FieldType.time).index;
  const ts = row[timeFieldIndex];
  const stringFieldIndex = fieldCache.getFirstFieldOfType(FieldType.string).index;
  const message = row[stringFieldIndex];

  const time = dateTime(ts);
  const timeEpochMs = time.valueOf();
  const timeFromNow = time.fromNow();
  const timeLocal = time.format('YYYY-MM-DD HH:mm:ss');
  const timeUtc = toUtc(ts).format('YYYY-MM-DD HH:mm:ss');

  // Level precedence: explicit 'level' field > 'level' series label >
  // heuristic detection from the message text.
  let logLevel = LogLevel.unknown;
  const logLevelField = fieldCache.getFieldByName('level');
  if (logLevelField) {
    logLevel = getLogLevelFromKey(row[logLevelField.index]);
  } else if (series.labels && Object.keys(series.labels).indexOf('level') !== -1) {
    logLevel = getLogLevelFromKey(series.labels['level']);
  } else {
    logLevel = getLogLevel(message);
  }
  const hasAnsi = hasAnsiCodes(message);
  const searchWords = series.meta && series.meta.searchWords ? series.meta.searchWords : [];

  return {
    logLevel,
    timeFromNow,
    timeEpochMs,
    timeLocal,
    timeUtc,
    uniqueLabels,
    hasAnsi,
    searchWords,
    // Displayed entry is ANSI-stripped; `raw` preserves the escape codes.
    entry: hasAnsi ? ansicolor.strip(message) : message,
    raw: message,
    labels: series.labels,
    timestamp: ts,
  };
}