logs_model.ts 9.2 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346
  1. import _ from 'lodash';
  2. import { colors } from '@grafana/ui';
  3. import { TimeSeries } from 'app/core/core';
  4. import { getThemeColor } from 'app/core/utils/colors';
/**
 * Mapping of log level abbreviation to canonical log level.
 * Supported levels are reduced to limit color variation.
 */
export enum LogLevel {
  emerg = 'critical',
  alert = 'critical',
  crit = 'critical',
  critical = 'critical',
  warn = 'warning',
  warning = 'warning',
  err = 'error',
  eror = 'error',
  error = 'error',
  info = 'info',
  notice = 'info',
  dbug = 'debug',
  debug = 'debug',
  trace = 'trace',
  // NOTE(review): 'unkown' is a misspelling of 'unknown', but the string value is
  // part of the runtime contract (it keys LogLevelColor below), so renaming either
  // the member or the value would be a breaking change.
  unkown = 'unkown',
}
/**
 * Display color per canonical log level.
 * Values index into the shared Grafana color palette (`colors`).
 */
export const LogLevelColor = {
  [LogLevel.critical]: colors[7],
  [LogLevel.warning]: colors[1],
  [LogLevel.error]: colors[4],
  [LogLevel.info]: colors[0],
  [LogLevel.debug]: colors[5],
  [LogLevel.trace]: colors[2],
  // getThemeColor('#8e8e8e', '#dde4ed') — presumably dark/light theme variants; confirm argument order
  [LogLevel.unkown]: getThemeColor('#8e8e8e', '#dde4ed'),
};
/** A substring match of a search term inside a log entry. */
export interface LogSearchMatch {
  /** Start index of the match within the entry text */
  start: number;
  /** Length of the matched substring */
  length: number;
  /** The matched text itself */
  text: string;
}
/** A single processed log row, ready for display. */
export interface LogRow {
  /** Count of consecutive duplicate rows collapsed into this one (set by dedupLogRows) */
  duplicates?: number;
  /** Raw log line text */
  entry: string;
  key: string; // timestamp + labels
  labels: LogsStreamLabels;
  logLevel: LogLevel;
  /** Terms to highlight within the entry — presumably derived from the search query; confirm at call sites */
  searchWords?: string[];
  timestamp: string; // ISO with nanosec precision
  /** Relative time string — NOTE(review): formatting source not visible here; confirm */
  timeFromNow: string;
  /** Timestamp in epoch milliseconds (used for series bucketing in makeSeriesForLogs) */
  timeEpochMs: number;
  /** Timestamp rendered in local time */
  timeLocal: string;
  /** Labels unique to this row's stream — TODO confirm semantics against producer */
  uniqueLabels?: LogsStreamLabels;
}
/** Frequency statistics for one value of a label or extracted field. */
export interface LogsLabelStat {
  /** Presumably marks the value currently selected in the UI — confirm with consumers */
  active?: boolean;
  /** Number of rows carrying this value */
  count: number;
  /** count divided by the number of rows considered (range 0..1) */
  proportion: number;
  /** The label/field value being counted */
  value: string;
}
/** Discriminates the runtime type of LogsMetaItem.value. */
export enum LogsMetaKind {
  Number,
  String,
  LabelsMap,
}
/** One entry of metadata attached to a logs result. */
export interface LogsMetaItem {
  label: string;
  /** Payload; its runtime type is indicated by `kind` */
  value: string | number | LogsStreamLabels;
  kind: LogsMetaKind;
}
/** A complete logs query result as consumed by the logs UI. */
export interface LogsModel {
  id: string; // Identify one logs result from another
  /** Optional metadata items describing the result */
  meta?: LogsMetaItem[];
  rows: LogRow[];
  /** Optional per-level time series (see makeSeriesForLogs) */
  series?: TimeSeries[];
}
/** A raw stream of log entries sharing one label set. */
export interface LogsStream {
  /** Label set as a raw string — presumably Loki-style `{key="value"}`; confirm with producer */
  labels: string;
  entries: LogsStreamEntry[];
  search?: string;
  /** Labels parsed from the raw `labels` string */
  parsedLabels?: LogsStreamLabels;
  /** Labels not shared with other streams in the result — TODO confirm */
  uniqueLabels?: LogsStreamLabels;
}
/** A single raw entry as received from the data source. */
export interface LogsStreamEntry {
  line: string;
  /** Timestamp string — format set by the data source; presumably nanosecond ISO as in LogRow — confirm */
  ts: string;
  // Legacy, was renamed to ts
  timestamp?: string;
}
/** Parsed labels as a plain key→value string map. */
export interface LogsStreamLabels {
  [key: string]: string;
}
/**
 * Human-readable description of each de-duplication strategy.
 * Member names intentionally mirror LogsDedupStrategy.
 */
export enum LogsDedupDescription {
  none = 'No de-duplication',
  exact = 'De-duplication of successive lines that are identical, ignoring ISO datetimes.',
  numbers = 'De-duplication of successive lines that are identical when ignoring numbers, e.g., IP addresses, latencies.',
  signature = 'De-duplication of successive lines that have identical punctuation and whitespace.',
}
/** Strategy identifiers accepted by dedupLogRows(); see LogsDedupDescription for UI text. */
export enum LogsDedupStrategy {
  none = 'none',
  exact = 'exact',
  numbers = 'numbers',
  signature = 'signature',
}
/**
 * Pluggable line-format parser (e.g. JSON, logfmt) used to extract
 * field labels and values from a log line.
 */
export interface LogsParser {
  /**
   * Value-agnostic matcher for a field label.
   * Used to filter rows, and first capture group contains the value.
   */
  buildMatcher: (label: string) => RegExp;
  /**
   * Returns all parsable substrings from a line, used for highlighting
   */
  getFields: (line: string) => string[];
  /**
   * Gets the label name from a parsable substring of a line
   */
  getLabelFromField: (field: string) => string;
  /**
   * Gets the label value from a parsable substring of a line
   */
  getValueFromField: (field: string) => string;
  /**
   * Function to verify if this is a valid parser for the given line.
   * The parser accepts the line unless it returns undefined.
   */
  test: (line: string) => any;
}
  127. const LOGFMT_REGEXP = /(?:^|\s)(\w+)=("[^"]*"|\S+)/;
  128. export const LogsParsers: { [name: string]: LogsParser } = {
  129. JSON: {
  130. buildMatcher: label => new RegExp(`(?:{|,)\\s*"${label}"\\s*:\\s*"?([\\d\\.]+|[^"]*)"?`),
  131. getFields: line => {
  132. const fields = [];
  133. try {
  134. const parsed = JSON.parse(line);
  135. _.map(parsed, (value, key) => {
  136. const fieldMatcher = new RegExp(`"${key}"\\s*:\\s*"?${_.escapeRegExp(JSON.stringify(value))}"?`);
  137. const match = line.match(fieldMatcher);
  138. if (match) {
  139. fields.push(match[0]);
  140. }
  141. });
  142. } catch {}
  143. return fields;
  144. },
  145. getLabelFromField: field => (field.match(/^"(\w+)"\s*:/) || [])[1],
  146. getValueFromField: field => (field.match(/:\s*(.*)$/) || [])[1],
  147. test: line => {
  148. try {
  149. return JSON.parse(line);
  150. } catch (error) {}
  151. },
  152. },
  153. logfmt: {
  154. buildMatcher: label => new RegExp(`(?:^|\\s)${label}=("[^"]*"|\\S+)`),
  155. getFields: line => {
  156. const fields = [];
  157. line.replace(new RegExp(LOGFMT_REGEXP, 'g'), substring => {
  158. fields.push(substring.trim());
  159. return '';
  160. });
  161. return fields;
  162. },
  163. getLabelFromField: field => (field.match(LOGFMT_REGEXP) || [])[1],
  164. getValueFromField: field => (field.match(LOGFMT_REGEXP) || [])[2],
  165. test: line => LOGFMT_REGEXP.test(line),
  166. },
  167. };
  168. export function calculateFieldStats(rows: LogRow[], extractor: RegExp): LogsLabelStat[] {
  169. // Consider only rows that satisfy the matcher
  170. const rowsWithField = rows.filter(row => extractor.test(row.entry));
  171. const rowCount = rowsWithField.length;
  172. // Get field value counts for eligible rows
  173. const countsByValue = _.countBy(rowsWithField, row => (row as LogRow).entry.match(extractor)[1]);
  174. const sortedCounts = _.chain(countsByValue)
  175. .map((count, value) => ({ count, value, proportion: count / rowCount }))
  176. .sortBy('count')
  177. .reverse()
  178. .value();
  179. return sortedCounts;
  180. }
  181. export function calculateLogsLabelStats(rows: LogRow[], label: string): LogsLabelStat[] {
  182. // Consider only rows that have the given label
  183. const rowsWithLabel = rows.filter(row => row.labels[label] !== undefined);
  184. const rowCount = rowsWithLabel.length;
  185. // Get label value counts for eligible rows
  186. const countsByValue = _.countBy(rowsWithLabel, row => (row as LogRow).labels[label]);
  187. const sortedCounts = _.chain(countsByValue)
  188. .map((count, value) => ({ count, value, proportion: count / rowCount }))
  189. .sortBy('count')
  190. .reverse()
  191. .value();
  192. return sortedCounts;
  193. }
  194. const isoDateRegexp = /\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-6]\d[,\.]\d+([+-][0-2]\d:[0-5]\d|Z)/g;
  195. function isDuplicateRow(row: LogRow, other: LogRow, strategy: LogsDedupStrategy): boolean {
  196. switch (strategy) {
  197. case LogsDedupStrategy.exact:
  198. // Exact still strips dates
  199. return row.entry.replace(isoDateRegexp, '') === other.entry.replace(isoDateRegexp, '');
  200. case LogsDedupStrategy.numbers:
  201. return row.entry.replace(/\d/g, '') === other.entry.replace(/\d/g, '');
  202. case LogsDedupStrategy.signature:
  203. return row.entry.replace(/\w/g, '') === other.entry.replace(/\w/g, '');
  204. default:
  205. return false;
  206. }
  207. }
  208. export function dedupLogRows(logs: LogsModel, strategy: LogsDedupStrategy): LogsModel {
  209. if (strategy === LogsDedupStrategy.none) {
  210. return logs;
  211. }
  212. const dedupedRows = logs.rows.reduce((result: LogRow[], row: LogRow, index, list) => {
  213. const previous = result[result.length - 1];
  214. if (index > 0 && isDuplicateRow(row, previous, strategy)) {
  215. previous.duplicates++;
  216. } else {
  217. row.duplicates = 0;
  218. result.push(row);
  219. }
  220. return result;
  221. }, []);
  222. return {
  223. ...logs,
  224. rows: dedupedRows,
  225. };
  226. }
/**
 * Probes the known parsers against a line and returns the first one that
 * accepts it. JSON is tried before logfmt (most specific first).
 *
 * NOTE(review): returns undefined when no parser matches, despite the
 * declared LogsParser return type — callers must handle a falsy result.
 */
export function getParser(line: string): LogsParser {
  let parser;
  try {
    // Errors from the JSON probe are deliberately swallowed so we can fall through
    if (LogsParsers.JSON.test(line)) {
      parser = LogsParsers.JSON;
    }
  } catch (error) {}
  if (!parser && LogsParsers.logfmt.test(line)) {
    parser = LogsParsers.logfmt;
  }
  return parser;
}
  239. export function filterLogLevels(logs: LogsModel, hiddenLogLevels: Set<LogLevel>): LogsModel {
  240. if (hiddenLogLevels.size === 0) {
  241. return logs;
  242. }
  243. const filteredRows = logs.rows.reduce((result: LogRow[], row: LogRow, index, list) => {
  244. if (!hiddenLogLevels.has(row.logLevel)) {
  245. result.push(row);
  246. }
  247. return result;
  248. }, []);
  249. return {
  250. ...logs,
  251. rows: filteredRows,
  252. };
  253. }
/**
 * Aggregates log rows into one count-over-time series per log level,
 * bucketing timestamps to intervalMs * 10.
 *
 * Assumes rows are ordered by time — TODO confirm with callers; the trailing
 * sort only repairs ordering within a series, not miscounted buckets.
 *
 * @param rows Rows providing timeEpochMs and logLevel.
 * @param intervalMs Query interval; the bucket size is 10x this (see note below).
 * @returns One TimeSeries per encountered level; datapoints are [count, bucketTime].
 */
export function makeSeriesForLogs(rows: LogRow[], intervalMs: number): TimeSeries[] {
  // currently interval is rangeMs / resolution, which is too low for showing series as bars.
  // need at least 10px per bucket, so we multiply interval by 10. Should be solved higher up the chain
  // when executing queries & interval calculated and not here but this is a temporary fix.
  // intervalMs = intervalMs * 10;
  // Graph time series by log level
  const seriesByLevel = {};
  const bucketSize = intervalMs * 10;
  const seriesList = [];
  for (const row of rows) {
    let series = seriesByLevel[row.logLevel];
    if (!series) {
      // First row seen for this level: lazily start its series
      seriesByLevel[row.logLevel] = series = {
        lastTs: null,
        datapoints: [],
        alias: row.logLevel,
        color: LogLevelColor[row.logLevel],
      };
      seriesList.push(series);
    }
    // align time to bucket size
    const time = Math.round(row.timeEpochMs / bucketSize) * bucketSize;
    // Entry for time
    if (time === series.lastTs) {
      // Same bucket as this level's previous row: bump the count in place
      series.datapoints[series.datapoints.length - 1][0]++;
    } else {
      // New bucket for this level
      series.datapoints.push([1, time]);
      series.lastTs = time;
    }
    // add zero to other levels to aid stacking so each level series has same number of points
    // NOTE(review): levels that first appear later receive no zero-fill for earlier
    // buckets, so series lengths can still differ — confirm the stacking code tolerates this.
    for (const other of seriesList) {
      if (other !== series && other.lastTs !== time) {
        other.datapoints.push([0, time]);
        other.lastTs = time;
      }
    }
  }
  // Zero-fill can append buckets out of order; sort each series by time before wrapping
  return seriesList.map(series => {
    series.datapoints.sort((a, b) => {
      return a[1] - b[1];
    });
    return new TimeSeries(series);
  });
  297. }