// logs_model.ts
  1. import _ from 'lodash';
  2. import { TimeSeries } from 'app/core/core';
  3. import colors, { getThemeColor } from 'app/core/utils/colors';
/**
 * Log severity levels. Alias members (crit/critical, warn/warning, err/error)
 * map spelling variants found in log sources onto one canonical string value.
 * NOTE(review): 'unkown' is misspelled, but both the member name and its string
 * value are referenced elsewhere (e.g. LogLevelColor) — renaming would break
 * callers, so the typo is preserved deliberately.
 */
export enum LogLevel {
  crit = 'critical',
  critical = 'critical',
  warn = 'warning',
  warning = 'warning',
  err = 'error',
  error = 'error',
  info = 'info',
  debug = 'debug',
  trace = 'trace',
  unkown = 'unkown',
}
/**
 * Maps each canonical LogLevel to a graph color from the shared palette.
 * The 'unkown' level gets a theme-dependent gray (light/dark variants)
 * via getThemeColor. Used by makeSeriesForLogs for per-level series colors.
 */
export const LogLevelColor = {
  [LogLevel.critical]: colors[7],
  [LogLevel.warning]: colors[1],
  [LogLevel.error]: colors[4],
  [LogLevel.info]: colors[0],
  [LogLevel.debug]: colors[5],
  [LogLevel.trace]: colors[2],
  [LogLevel.unkown]: getThemeColor('#8e8e8e', '#dde4ed'),
};
/** A highlighted match of a search term within a log line. */
export interface LogSearchMatch {
  /** Character offset of the match within the line. */
  start: number;
  /** Length of the matched text, in characters. */
  length: number;
  /** The matched text itself. */
  text: string;
}
/** A single log line with its parsed metadata. */
export interface LogRow {
  /** Number of successive duplicate rows collapsed into this one (set by dedupLogRows). */
  duplicates?: number;
  /** Raw log line content. */
  entry: string;
  key: string; // timestamp + labels
  /** All labels attached to this row's stream. */
  labels: LogsStreamLabels;
  logLevel: LogLevel;
  /** Terms to highlight in the rendered entry. */
  searchWords?: string[];
  timestamp: string; // ISO with nanosec precision
  /** Human-readable relative time, e.g. "5 minutes ago". */
  timeFromNow: string;
  /** Timestamp in epoch milliseconds (used for bucketing in makeSeriesForLogs). */
  timeEpochMs: number;
  /** Timestamp formatted in the browser's local time. */
  timeLocal: string;
  /** Labels unique to this row's stream within the result set. */
  uniqueLabels?: LogsStreamLabels;
}
/** Frequency statistics for one value of a label or parsed field. */
export interface LogsLabelStat {
  /** True when this value is the currently selected/active one in the UI. */
  active?: boolean;
  /** Number of rows carrying this value. */
  count: number;
  /** count divided by the number of eligible rows (0..1). */
  proportion: number;
  /** The label/field value being counted. */
  value: string;
}
/** Discriminates how a LogsMetaItem.value should be interpreted/rendered. */
export enum LogsMetaKind {
  Number,
  String,
  LabelsMap,
}
/** One entry of result-level metadata shown alongside the log rows. */
export interface LogsMetaItem {
  /** Display name of the metadata entry. */
  label: string;
  /** Payload; its runtime shape is described by `kind`. */
  value: string | number | LogsStreamLabels;
  /** Tells the renderer which of the value's possible types applies. */
  kind: LogsMetaKind;
}
/** A complete logs query result: rows plus optional metadata and graph series. */
export interface LogsModel {
  id: string; // Identify one logs result from another
  /** Result-level metadata entries (e.g. common labels, limits). */
  meta?: LogsMetaItem[];
  /** The log rows themselves. */
  rows: LogRow[];
  /** Optional per-level time series derived from the rows (see makeSeriesForLogs). */
  series?: TimeSeries[];
}
/** A raw stream of log entries sharing one label set. */
export interface LogsStream {
  /** Label set as a raw string, e.g. '{job="app"}'. */
  labels: string;
  entries: LogsStreamEntry[];
  /** Search expression associated with this stream, if any. */
  search?: string;
  /** `labels` parsed into a key/value map. */
  parsedLabels?: LogsStreamLabels;
  /** Labels unique to this stream within the result set. */
  uniqueLabels?: LogsStreamLabels;
}
/** One raw line of a log stream. */
export interface LogsStreamEntry {
  /** Raw log line. */
  line: string;
  /** Timestamp of the entry. */
  ts: string;
  // Legacy, was renamed to ts
  timestamp?: string;
}
/** Key/value map of stream labels, e.g. { job: "app", env: "prod" }. */
export interface LogsStreamLabels {
  [key: string]: string;
}
/**
 * Human-readable descriptions for the de-duplication strategies.
 * Member names intentionally mirror LogsDedupStrategy so the UI can look up
 * a description by strategy name.
 */
export enum LogsDedupDescription {
  none = 'No de-duplication',
  exact = 'De-duplication of successive lines that are identical, ignoring ISO datetimes.',
  numbers = 'De-duplication of successive lines that are identical when ignoring numbers, e.g., IP addresses, latencies.',
  signature = 'De-duplication of successive lines that have identical punctuation and whitespace.',
}
/**
 * De-duplication strategies accepted by dedupLogRows.
 * Member names must stay in sync with LogsDedupDescription.
 */
export enum LogsDedupStrategy {
  none = 'none',
  exact = 'exact',
  numbers = 'numbers',
  signature = 'signature',
}
/**
 * A pluggable line-format parser (e.g. JSON, logfmt) used to extract
 * field labels and values from a raw log line. Implementations live in
 * LogsParsers; getParser() probes them to pick one for a given line.
 */
export interface LogsParser {
  /**
   * Value-agnostic matcher for a field label.
   * Used to filter rows, and first capture group contains the value.
   */
  buildMatcher: (label: string) => RegExp;

  /**
   * Returns all parsable substrings from a line, used for highlighting
   */
  getFields: (line: string) => string[];

  /**
   * Gets the label name from a parsable substring of a line
   */
  getLabelFromField: (field: string) => string;

  /**
   * Gets the label value from a parsable substring of a line
   */
  getValueFromField: (field: string) => string;

  /**
   * Function to verify if this is a valid parser for the given line.
   * The parser accepts the line unless it returns undefined.
   */
  test: (line: string) => any;
}
  117. const LOGFMT_REGEXP = /(?:^|\s)(\w+)=("[^"]*"|\S+)/;
  118. export const LogsParsers: { [name: string]: LogsParser } = {
  119. JSON: {
  120. buildMatcher: label => new RegExp(`(?:{|,)\\s*"${label}"\\s*:\\s*"?([\\d\\.]+|[^"]*)"?`),
  121. getFields: line => {
  122. const fields = [];
  123. try {
  124. const parsed = JSON.parse(line);
  125. _.map(parsed, (value, key) => {
  126. const fieldMatcher = new RegExp(`"${key}"\\s*:\\s*"?${_.escapeRegExp(JSON.stringify(value))}"?`);
  127. const match = line.match(fieldMatcher);
  128. if (match) {
  129. fields.push(match[0]);
  130. }
  131. });
  132. } catch {}
  133. return fields;
  134. },
  135. getLabelFromField: field => (field.match(/^"(\w+)"\s*:/) || [])[1],
  136. getValueFromField: field => (field.match(/:\s*(.*)$/) || [])[1],
  137. test: line => {
  138. try {
  139. return JSON.parse(line);
  140. } catch (error) {}
  141. },
  142. },
  143. logfmt: {
  144. buildMatcher: label => new RegExp(`(?:^|\\s)${label}=("[^"]*"|\\S+)`),
  145. getFields: line => {
  146. const fields = [];
  147. line.replace(new RegExp(LOGFMT_REGEXP, 'g'), substring => {
  148. fields.push(substring.trim());
  149. return '';
  150. });
  151. return fields;
  152. },
  153. getLabelFromField: field => (field.match(LOGFMT_REGEXP) || [])[1],
  154. getValueFromField: field => (field.match(LOGFMT_REGEXP) || [])[2],
  155. test: line => LOGFMT_REGEXP.test(line),
  156. },
  157. };
  158. export function calculateFieldStats(rows: LogRow[], extractor: RegExp): LogsLabelStat[] {
  159. // Consider only rows that satisfy the matcher
  160. const rowsWithField = rows.filter(row => extractor.test(row.entry));
  161. const rowCount = rowsWithField.length;
  162. // Get field value counts for eligible rows
  163. const countsByValue = _.countBy(rowsWithField, row => (row as LogRow).entry.match(extractor)[1]);
  164. const sortedCounts = _.chain(countsByValue)
  165. .map((count, value) => ({ count, value, proportion: count / rowCount }))
  166. .sortBy('count')
  167. .reverse()
  168. .value();
  169. return sortedCounts;
  170. }
  171. export function calculateLogsLabelStats(rows: LogRow[], label: string): LogsLabelStat[] {
  172. // Consider only rows that have the given label
  173. const rowsWithLabel = rows.filter(row => row.labels[label] !== undefined);
  174. const rowCount = rowsWithLabel.length;
  175. // Get label value counts for eligible rows
  176. const countsByValue = _.countBy(rowsWithLabel, row => (row as LogRow).labels[label]);
  177. const sortedCounts = _.chain(countsByValue)
  178. .map((count, value) => ({ count, value, proportion: count / rowCount }))
  179. .sortBy('count')
  180. .reverse()
  181. .value();
  182. return sortedCounts;
  183. }
  184. const isoDateRegexp = /\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-6]\d[,\.]\d+([+-][0-2]\d:[0-5]\d|Z)/g;
  185. function isDuplicateRow(row: LogRow, other: LogRow, strategy: LogsDedupStrategy): boolean {
  186. switch (strategy) {
  187. case LogsDedupStrategy.exact:
  188. // Exact still strips dates
  189. return row.entry.replace(isoDateRegexp, '') === other.entry.replace(isoDateRegexp, '');
  190. case LogsDedupStrategy.numbers:
  191. return row.entry.replace(/\d/g, '') === other.entry.replace(/\d/g, '');
  192. case LogsDedupStrategy.signature:
  193. return row.entry.replace(/\w/g, '') === other.entry.replace(/\w/g, '');
  194. default:
  195. return false;
  196. }
  197. }
  198. export function dedupLogRows(logs: LogsModel, strategy: LogsDedupStrategy): LogsModel {
  199. if (strategy === LogsDedupStrategy.none) {
  200. return logs;
  201. }
  202. const dedupedRows = logs.rows.reduce((result: LogRow[], row: LogRow, index, list) => {
  203. const previous = result[result.length - 1];
  204. if (index > 0 && isDuplicateRow(row, previous, strategy)) {
  205. previous.duplicates++;
  206. } else {
  207. row.duplicates = 0;
  208. result.push(row);
  209. }
  210. return result;
  211. }, []);
  212. return {
  213. ...logs,
  214. rows: dedupedRows,
  215. };
  216. }
  217. export function getParser(line: string): LogsParser {
  218. let parser;
  219. try {
  220. if (LogsParsers.JSON.test(line)) {
  221. parser = LogsParsers.JSON;
  222. }
  223. } catch (error) {}
  224. if (!parser && LogsParsers.logfmt.test(line)) {
  225. parser = LogsParsers.logfmt;
  226. }
  227. return parser;
  228. }
  229. export function filterLogLevels(logs: LogsModel, hiddenLogLevels: Set<LogLevel>): LogsModel {
  230. if (hiddenLogLevels.size === 0) {
  231. return logs;
  232. }
  233. const filteredRows = logs.rows.reduce((result: LogRow[], row: LogRow, index, list) => {
  234. if (!hiddenLogLevels.has(row.logLevel)) {
  235. result.push(row);
  236. }
  237. return result;
  238. }, []);
  239. return {
  240. ...logs,
  241. rows: filteredRows,
  242. };
  243. }
/**
 * Builds one TimeSeries per log level by bucketing row timestamps.
 *
 * Rows are counted into time buckets of `intervalMs * 10` ms; every level's
 * series is padded with zero-points for buckets where other levels have data,
 * so all series have the same x-values (needed for stacked rendering).
 *
 * @param rows Log rows to aggregate (any order; output points are sorted).
 * @param intervalMs Query interval in ms; bucket size is 10x this (see note).
 * @returns One TimeSeries per distinct log level seen in `rows`.
 */
export function makeSeriesForLogs(rows: LogRow[], intervalMs: number): TimeSeries[] {
  // currently interval is rangeMs / resolution, which is too low for showing series as bars.
  // need at least 10px per bucket, so we multiply interval by 10. Should be solved higher up the chain
  // when executing queries & interval calculated and not here but this is a temporary fix.
  // intervalMs = intervalMs * 10;

  // Graph time series by log level
  const seriesByLevel = {};
  const bucketSize = intervalMs * 10;
  const seriesList = [];

  for (const row of rows) {
    let series = seriesByLevel[row.logLevel];
    if (!series) {
      // First row seen for this level: start a new series skeleton.
      // NOTE(review): datapoints are [count, timeMs] pairs — presumably the
      // shape TimeSeries expects; confirm against the TimeSeries constructor.
      seriesByLevel[row.logLevel] = series = {
        lastTs: null,
        datapoints: [],
        alias: row.logLevel,
        color: LogLevelColor[row.logLevel],
      };
      seriesList.push(series);
    }

    // align time to bucket size
    // (Math.round, not floor — rows near a bucket edge land in the nearer bucket.)
    const time = Math.round(row.timeEpochMs / bucketSize) * bucketSize;

    // Entry for time
    if (time === series.lastTs) {
      // Same bucket as the previous row of this level: bump its count.
      series.datapoints[series.datapoints.length - 1][0]++;
    } else {
      series.datapoints.push([1, time]);
      series.lastTs = time;
    }

    // add zero to other levels to aid stacking so each level series has same number of points
    for (const other of seriesList) {
      if (other !== series && other.lastTs !== time) {
        other.datapoints.push([0, time]);
        other.lastTs = time;
      }
    }
  }

  // Points may be appended out of order (zero-padding above); sort each
  // series by timestamp before wrapping it in a TimeSeries.
  return seriesList.map(series => {
    series.datapoints.sort((a, b) => {
      return a[1] - b[1];
    });
    return new TimeSeries(series);
  });
}