// logs_model.ts
  1. import _ from 'lodash';
  2. import ansicolor from 'vendor/ansicolor/ansicolor';
  3. import {
  4. colors,
  5. TimeSeries,
  6. Labels,
  7. LogLevel,
  8. SeriesData,
  9. findCommonLabels,
  10. findUniqueLabels,
  11. getLogLevel,
  12. toLegacyResponseData,
  13. FieldCache,
  14. FieldType,
  15. getLogLevelFromKey,
  16. LogRowModel,
  17. LogsModel,
  18. LogsMetaItem,
  19. LogsMetaKind,
  20. LogsParser,
  21. LogLabelStatsModel,
  22. LogsDedupStrategy,
  23. } from '@grafana/ui';
  24. import { getThemeColor } from 'app/core/utils/colors';
  25. import { hasAnsiCodes } from 'app/core/utils/text';
  26. import { dateTime } from '@grafana/ui/src/utils/moment_wrapper';
// Maps each log level to the color used when graphing log-level histograms.
// Values index into the shared `colors` palette; `unknown` picks a
// theme-appropriate grey via getThemeColor(dark, light).
export const LogLevelColor = {
  [LogLevel.critical]: colors[7],
  [LogLevel.warning]: colors[1],
  [LogLevel.error]: colors[4],
  [LogLevel.info]: colors[0],
  [LogLevel.debug]: colors[5],
  [LogLevel.trace]: colors[2],
  // Fallback color for lines whose level could not be determined
  [LogLevel.unknown]: getThemeColor('#8e8e8e', '#dde4ed'),
};
// Human-readable descriptions for each LogsDedupStrategy, keyed by strategy
// name, for display in the de-duplication picker UI.
export enum LogsDedupDescription {
  none = 'No de-duplication',
  exact = 'De-duplication of successive lines that are identical, ignoring ISO datetimes.',
  numbers = 'De-duplication of successive lines that are identical when ignoring numbers, e.g., IP addresses, latencies.',
  signature = 'De-duplication of successive lines that have identical punctuation and whitespace.',
}
// Matches one logfmt key=value pair: group 1 = key, group 2 = value (either a
// quoted string or a bare token). Deliberately has no /g flag; callers that
// need repeated matching clone it with 'g' (see logfmt.getFields below).
const LOGFMT_REGEXP = /(?:^|\s)(\w+)=("[^"]*"|\S+)/;

// Built-in log-line parsers keyed by format name. Each LogsParser can detect
// its format (`test`), extract raw key/value field substrings (`getFields`),
// split a field into label/value, and build a value-matcher regex for a label.
export const LogsParsers: { [name: string]: LogsParser } = {
  JSON: {
    // Matches `"label": value` inside a serialized JSON object; group 1 is the
    // value with surrounding quotes stripped when it is a string.
    buildMatcher: label => new RegExp(`(?:{|,)\\s*"${label}"\\s*:\\s*"?([\\d\\.]+|[^"]*)"?`),
    getFields: line => {
      const fields: string[] = [];
      try {
        const parsed = JSON.parse(line);
        _.map(parsed, (value, key) => {
          // Re-locate each parsed key/value in the raw line so the returned
          // field text preserves the original formatting of the line.
          const fieldMatcher = new RegExp(`"${key}"\\s*:\\s*"?${_.escapeRegExp(JSON.stringify(value))}"?`);
          const match = line.match(fieldMatcher);
          if (match) {
            fields.push(match[0]);
          }
        });
      } catch {} // not valid JSON — return whatever was collected (empty list)
      return fields;
    },
    getLabelFromField: field => (field.match(/^"(\w+)"\s*:/) || [])[1],
    getValueFromField: field => (field.match(/:\s*(.*)$/) || [])[1],
    test: line => {
      try {
        // Truthy parse result means "is JSON". NOTE(review): lines that parse
        // to a falsy JSON value ('0', 'false', 'null', '""') are treated as
        // non-JSON by this check — confirm that is intended.
        return JSON.parse(line);
      } catch (error) {} // parse failure → implicit undefined → not JSON
    },
  },
  logfmt: {
    buildMatcher: label => new RegExp(`(?:^|\\s)${label}=("[^"]*"|\\S+)`),
    getFields: line => {
      const fields: string[] = [];
      // Fresh global clone per call so repeated calls don't share lastIndex
      // state; replace() is used purely to iterate every match.
      line.replace(new RegExp(LOGFMT_REGEXP, 'g'), substring => {
        fields.push(substring.trim());
        return '';
      });
      return fields;
    },
    getLabelFromField: field => (field.match(LOGFMT_REGEXP) || [])[1],
    getValueFromField: field => (field.match(LOGFMT_REGEXP) || [])[2],
    test: line => LOGFMT_REGEXP.test(line),
  },
};
  83. export function calculateFieldStats(rows: LogRowModel[], extractor: RegExp): LogLabelStatsModel[] {
  84. // Consider only rows that satisfy the matcher
  85. const rowsWithField = rows.filter(row => extractor.test(row.entry));
  86. const rowCount = rowsWithField.length;
  87. // Get field value counts for eligible rows
  88. const countsByValue = _.countBy(rowsWithField, row => (row as LogRowModel).entry.match(extractor)[1]);
  89. const sortedCounts = _.chain(countsByValue)
  90. .map((count, value) => ({ count, value, proportion: count / rowCount }))
  91. .sortBy('count')
  92. .reverse()
  93. .value();
  94. return sortedCounts;
  95. }
  96. export function calculateLogsLabelStats(rows: LogRowModel[], label: string): LogLabelStatsModel[] {
  97. // Consider only rows that have the given label
  98. const rowsWithLabel = rows.filter(row => row.labels[label] !== undefined);
  99. const rowCount = rowsWithLabel.length;
  100. // Get label value counts for eligible rows
  101. const countsByValue = _.countBy(rowsWithLabel, row => (row as LogRowModel).labels[label]);
  102. const sortedCounts = _.chain(countsByValue)
  103. .map((count, value) => ({ count, value, proportion: count / rowCount }))
  104. .sortBy('count')
  105. .reverse()
  106. .value();
  107. return sortedCounts;
  108. }
  109. const isoDateRegexp = /\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-6]\d[,\.]\d+([+-][0-2]\d:[0-5]\d|Z)/g;
  110. function isDuplicateRow(row: LogRowModel, other: LogRowModel, strategy: LogsDedupStrategy): boolean {
  111. switch (strategy) {
  112. case LogsDedupStrategy.exact:
  113. // Exact still strips dates
  114. return row.entry.replace(isoDateRegexp, '') === other.entry.replace(isoDateRegexp, '');
  115. case LogsDedupStrategy.numbers:
  116. return row.entry.replace(/\d/g, '') === other.entry.replace(/\d/g, '');
  117. case LogsDedupStrategy.signature:
  118. return row.entry.replace(/\w/g, '') === other.entry.replace(/\w/g, '');
  119. default:
  120. return false;
  121. }
  122. }
  123. export function dedupLogRows(logs: LogsModel, strategy: LogsDedupStrategy): LogsModel {
  124. if (strategy === LogsDedupStrategy.none) {
  125. return logs;
  126. }
  127. const dedupedRows = logs.rows.reduce((result: LogRowModel[], row: LogRowModel, index, list) => {
  128. const rowCopy = { ...row };
  129. const previous = result[result.length - 1];
  130. if (index > 0 && isDuplicateRow(row, previous, strategy)) {
  131. previous.duplicates++;
  132. } else {
  133. rowCopy.duplicates = 0;
  134. result.push(rowCopy);
  135. }
  136. return result;
  137. }, []);
  138. return {
  139. ...logs,
  140. rows: dedupedRows,
  141. };
  142. }
  143. export function getParser(line: string): LogsParser {
  144. let parser;
  145. try {
  146. if (LogsParsers.JSON.test(line)) {
  147. parser = LogsParsers.JSON;
  148. }
  149. } catch (error) {}
  150. if (!parser && LogsParsers.logfmt.test(line)) {
  151. parser = LogsParsers.logfmt;
  152. }
  153. return parser;
  154. }
  155. export function filterLogLevels(logs: LogsModel, hiddenLogLevels: Set<LogLevel>): LogsModel {
  156. if (hiddenLogLevels.size === 0) {
  157. return logs;
  158. }
  159. const filteredRows = logs.rows.reduce((result: LogRowModel[], row: LogRowModel, index, list) => {
  160. if (!hiddenLogLevels.has(row.logLevel)) {
  161. result.push(row);
  162. }
  163. return result;
  164. }, []);
  165. return {
  166. ...logs,
  167. rows: filteredRows,
  168. };
  169. }
// Builds one stacked-bar time series per log level from the given rows,
// counting rows into time buckets of `intervalMs * 10` milliseconds.
export function makeSeriesForLogs(rows: LogRowModel[], intervalMs: number): TimeSeries[] {
  // currently interval is rangeMs / resolution, which is too low for showing series as bars.
  // need at least 10px per bucket, so we multiply interval by 10. Should be solved higher up the chain
  // when executing queries & interval calculated and not here but this is a temporary fix.
  // intervalMs = intervalMs * 10;
  // Graph time series by log level
  const seriesByLevel: any = {};
  const bucketSize = intervalMs * 10;
  const seriesList: any[] = [];
  for (const row of rows) {
    let series = seriesByLevel[row.logLevel];
    if (!series) {
      // First row seen for this level — create its series lazily
      seriesByLevel[row.logLevel] = series = {
        lastTs: null,
        datapoints: [],
        alias: row.logLevel,
        color: LogLevelColor[row.logLevel],
      };
      seriesList.push(series);
    }
    // align time to bucket size
    const time = Math.round(row.timeEpochMs / bucketSize) * bucketSize;
    // Entry for time: increment the current bucket or open a new datapoint.
    // NOTE(review): this assumes rows for a level arrive in time order — an
    // out-of-order row opens a new datapoint instead of merging into an
    // earlier bucket; confirm callers provide sorted rows.
    if (time === series.lastTs) {
      series.datapoints[series.datapoints.length - 1][0]++;
    } else {
      series.datapoints.push([1, time]);
      series.lastTs = time;
    }
    // add zero to other levels to aid stacking so each level series has same number of points
    for (const other of seriesList) {
      if (other !== series && other.lastTs !== time) {
        other.datapoints.push([0, time]);
        other.lastTs = time;
      }
    }
  }
  // Sort each series by time ascending and map to the legacy TimeSeries shape
  // (datapoints are [count, epochMs] pairs)
  return seriesList.map(series => {
    series.datapoints.sort((a: number[], b: number[]) => {
      return a[1] - b[1];
    });
    return {
      datapoints: series.datapoints,
      target: series.alias,
      alias: series.alias,
      color: series.color,
    };
  });
}
  219. function isLogsData(series: SeriesData) {
  220. return series.fields.some(f => f.type === FieldType.time) && series.fields.some(f => f.type === FieldType.string);
  221. }
  222. export function seriesDataToLogsModel(seriesData: SeriesData[], intervalMs: number): LogsModel {
  223. const metricSeries: SeriesData[] = [];
  224. const logSeries: SeriesData[] = [];
  225. for (const series of seriesData) {
  226. if (isLogsData(series)) {
  227. logSeries.push(series);
  228. continue;
  229. }
  230. metricSeries.push(series);
  231. }
  232. const logsModel = logSeriesToLogsModel(logSeries);
  233. if (logsModel) {
  234. if (metricSeries.length === 0) {
  235. logsModel.series = makeSeriesForLogs(logsModel.rows, intervalMs);
  236. } else {
  237. logsModel.series = [];
  238. for (const series of metricSeries) {
  239. logsModel.series.push(toLegacyResponseData(series) as TimeSeries);
  240. }
  241. }
  242. return logsModel;
  243. }
  244. return {
  245. hasUniqueLabels: false,
  246. rows: [],
  247. meta: [],
  248. series: [],
  249. };
  250. }
  251. export function logSeriesToLogsModel(logSeries: SeriesData[]): LogsModel {
  252. if (logSeries.length === 0) {
  253. return undefined;
  254. }
  255. const allLabels: Labels[] = [];
  256. for (let n = 0; n < logSeries.length; n++) {
  257. const series = logSeries[n];
  258. if (series.labels) {
  259. allLabels.push(series.labels);
  260. }
  261. }
  262. let commonLabels: Labels = {};
  263. if (allLabels.length > 0) {
  264. commonLabels = findCommonLabels(allLabels);
  265. }
  266. const rows: LogRowModel[] = [];
  267. let hasUniqueLabels = false;
  268. for (let i = 0; i < logSeries.length; i++) {
  269. const series = logSeries[i];
  270. const fieldCache = new FieldCache(series.fields);
  271. const uniqueLabels = findUniqueLabels(series.labels, commonLabels);
  272. if (Object.keys(uniqueLabels).length > 0) {
  273. hasUniqueLabels = true;
  274. }
  275. for (let j = 0; j < series.rows.length; j++) {
  276. rows.push(processLogSeriesRow(series, fieldCache, j, uniqueLabels));
  277. }
  278. }
  279. const sortedRows = rows.sort((a, b) => {
  280. return a.timestamp > b.timestamp ? -1 : 1;
  281. });
  282. // Meta data to display in status
  283. const meta: LogsMetaItem[] = [];
  284. if (_.size(commonLabels) > 0) {
  285. meta.push({
  286. label: 'Common labels',
  287. value: commonLabels,
  288. kind: LogsMetaKind.LabelsMap,
  289. });
  290. }
  291. const limits = logSeries.filter(series => series.meta && series.meta.limit);
  292. if (limits.length > 0) {
  293. meta.push({
  294. label: 'Limit',
  295. value: `${limits[0].meta.limit} (${sortedRows.length} returned)`,
  296. kind: LogsMetaKind.String,
  297. });
  298. }
  299. return {
  300. hasUniqueLabels,
  301. meta,
  302. rows: sortedRows,
  303. };
  304. }
  305. export function processLogSeriesRow(
  306. series: SeriesData,
  307. fieldCache: FieldCache,
  308. rowIndex: number,
  309. uniqueLabels: Labels
  310. ): LogRowModel {
  311. const row = series.rows[rowIndex];
  312. const timeFieldIndex = fieldCache.getFirstFieldOfType(FieldType.time).index;
  313. const ts = row[timeFieldIndex];
  314. const stringFieldIndex = fieldCache.getFirstFieldOfType(FieldType.string).index;
  315. const message = row[stringFieldIndex];
  316. const time = dateTime(ts);
  317. const timeEpochMs = time.valueOf();
  318. const timeFromNow = time.fromNow();
  319. const timeLocal = time.format('YYYY-MM-DD HH:mm:ss');
  320. let logLevel = LogLevel.unknown;
  321. const logLevelField = fieldCache.getFieldByName('level');
  322. if (logLevelField) {
  323. logLevel = getLogLevelFromKey(row[logLevelField.index]);
  324. } else if (series.labels && Object.keys(series.labels).indexOf('level') !== -1) {
  325. logLevel = getLogLevelFromKey(series.labels['level']);
  326. } else {
  327. logLevel = getLogLevel(message);
  328. }
  329. const hasAnsi = hasAnsiCodes(message);
  330. const searchWords = series.meta && series.meta.searchWords ? series.meta.searchWords : [];
  331. return {
  332. logLevel,
  333. timeFromNow,
  334. timeEpochMs,
  335. timeLocal,
  336. uniqueLabels,
  337. hasAnsi,
  338. searchWords,
  339. entry: hasAnsi ? ansicolor.strip(message) : message,
  340. raw: message,
  341. labels: series.labels,
  342. timestamp: ts,
  343. };
  344. }