// logs_model.ts

import _ from 'lodash';
import { colors, TimeSeries } from '@grafana/ui';
import { getThemeColor } from 'app/core/utils/colors';
/**
 * Mapping of log level abbreviation to canonical log level.
 * Supported levels are reduced to limit color variation.
 */
export enum LogLevel {
  emerg = 'critical',
  alert = 'critical',
  crit = 'critical',
  critical = 'critical',
  warn = 'warning',
  warning = 'warning',
  err = 'error',
  eror = 'error', // presumably covers a misspelling seen in real log output
  error = 'error',
  info = 'info',
  notice = 'info',
  dbug = 'debug',
  debug = 'debug',
  trace = 'trace',
  unknown = 'unknown', // fallback when no level could be detected
}
/**
 * Color assigned to each canonical log level, e.g. for graphing log volume.
 * `unknown` uses a theme-aware gray (dark theme color, light theme color).
 */
export const LogLevelColor = {
  [LogLevel.critical]: colors[7],
  [LogLevel.warning]: colors[1],
  [LogLevel.error]: colors[4],
  [LogLevel.info]: colors[0],
  [LogLevel.debug]: colors[5],
  [LogLevel.trace]: colors[2],
  [LogLevel.unknown]: getThemeColor('#8e8e8e', '#dde4ed'),
};
/** Location of a search match inside a log line. */
export interface LogSearchMatch {
  start: number; // offset of the match in the line — presumably 0-based; confirm with producers
  length: number; // number of matched characters
  text: string; // the matched substring
}
/** A single processed log line with parsed metadata, ready for display. */
export interface LogRowModel {
  duplicates?: number; // count of consecutive duplicates collapsed into this row (set by dedupLogRows)
  entry: string; // processed line text used for matching/filtering/stats
  hasAnsi: boolean; // NOTE(review): presumably true when `raw` contains ANSI escapes — confirm with producers
  key: string; // timestamp + labels
  labels: LogsStreamLabels;
  logLevel: LogLevel;
  raw: string; // unprocessed line — assumed as received from the datasource; TODO confirm
  searchWords?: string[]; // terms to highlight — presumed; verify against consumers
  timestamp: string; // ISO with nanosec precision
  timeFromNow: string;
  timeEpochMs: number;
  timeLocal: string;
  uniqueLabels?: LogsStreamLabels; // labels not shared by all rows of the result
}
/** Aggregated occurrence statistics for one distinct label/field value. */
export interface LogLabelStatsModel {
  active?: boolean; // UI selection state — presumed; confirm with consumers
  count: number; // number of eligible rows carrying this value
  proportion: number; // count / number of eligible rows (0..1)
  value: string; // the label/field value being counted
}
/** Discriminator telling consumers how to interpret LogsMetaItem.value. */
export enum LogsMetaKind {
  Number,
  String,
  LabelsMap,
}
/** One piece of metadata about a logs result (e.g. shown above the log list). */
export interface LogsMetaItem {
  label: string; // display name of the metadata entry
  value: string | number | LogsStreamLabels; // actual runtime type is indicated by `kind`
  kind: LogsMetaKind;
}
/** A complete logs query result: rows plus optional metadata and graph series. */
export interface LogsModel {
  hasUniqueLabels: boolean; // presumably true when rows carry uniqueLabels — confirm with producers
  id: string; // Identify one logs result from another
  meta?: LogsMetaItem[];
  rows: LogRowModel[];
  series?: TimeSeries[]; // e.g. produced by makeSeriesForLogs()
}
/** Raw stream of log entries plus its label set, as delivered by the datasource. */
export interface LogsStream {
  labels: string; // label set in raw string form — exact format presumed datasource-specific; confirm
  entries: LogsStreamEntry[];
  search?: string;
  parsedLabels?: LogsStreamLabels; // `labels` parsed into a key/value map
  uniqueLabels?: LogsStreamLabels; // labels not shared with other streams of the result
}
/** A single raw log entry: line text and its timestamp. */
export interface LogsStreamEntry {
  line: string;
  ts: string;
  // Legacy, was renamed to ts
  timestamp?: string;
}
/** Parsed label set: plain map of label name to label value. */
export interface LogsStreamLabels {
  [key: string]: string;
}
/** Human-readable description for each LogsDedupStrategy option (e.g. for tooltips). */
export enum LogsDedupDescription {
  none = 'No de-duplication',
  exact = 'De-duplication of successive lines that are identical, ignoring ISO datetimes.',
  numbers = 'De-duplication of successive lines that are identical when ignoring numbers, e.g., IP addresses, latencies.',
  signature = 'De-duplication of successive lines that have identical punctuation and whitespace.',
}
/** Available de-duplication strategies; consumed by dedupLogRows()/isDuplicateRow(). */
export enum LogsDedupStrategy {
  none = 'none',
  exact = 'exact',
  numbers = 'numbers',
  signature = 'signature',
}
/**
 * Contract for extracting labeled fields from a raw log line.
 * Implementations live in LogsParsers (JSON, logfmt); use getParser() to auto-detect.
 */
export interface LogsParser {
  /**
   * Value-agnostic matcher for a field label.
   * Used to filter rows, and first capture group contains the value.
   */
  buildMatcher: (label: string) => RegExp;

  /**
   * Returns all parsable substrings from a line, used for highlighting
   */
  getFields: (line: string) => string[];

  /**
   * Gets the label name from a parsable substring of a line
   */
  getLabelFromField: (field: string) => string;

  /**
   * Gets the label value from a parsable substring of a line
   */
  getValueFromField: (field: string) => string;

  /**
   * Function to verify if this is a valid parser for the given line.
   * The parser accepts the line unless it returns undefined.
   */
  test: (line: string) => any;
}
// One logfmt key-value pair: `key=value` or `key="quoted value"`, preceded by
// start-of-line or whitespace. Group 1 = key, group 2 = value (quotes included).
const LOGFMT_REGEXP = /(?:^|\s)(\w+)=("[^"]*"|\S+)/;

/** Built-in field parsers keyed by format name; see getParser() for auto-detection. */
export const LogsParsers: { [name: string]: LogsParser } = {
  JSON: {
    // Matches `"label": value` inside a JSON object; group 1 captures the
    // value with surrounding quotes stripped where possible.
    buildMatcher: label => new RegExp(`(?:{|,)\\s*"${label}"\\s*:\\s*"?([\\d\\.]+|[^"]*)"?`),
    getFields: line => {
      const fields = [];
      try {
        const parsed = JSON.parse(line);
        // For each top-level key, locate the exact `"key": value` substring in
        // the ORIGINAL line text so highlighting stays in sync with the raw line.
        _.map(parsed, (value, key) => {
          const fieldMatcher = new RegExp(`"${key}"\\s*:\\s*"?${_.escapeRegExp(JSON.stringify(value))}"?`);
          const match = line.match(fieldMatcher);
          if (match) {
            fields.push(match[0]);
          }
        });
      } catch {} // not valid JSON: no fields
      return fields;
    },
    getLabelFromField: field => (field.match(/^"(\w+)"\s*:/) || [])[1],
    getValueFromField: field => (field.match(/:\s*(.*)$/) || [])[1],
    test: line => {
      try {
        // Returns the parsed value when the line is JSON; undefined (reject) otherwise.
        return JSON.parse(line);
      } catch (error) {}
    },
  },
  logfmt: {
    buildMatcher: label => new RegExp(`(?:^|\\s)${label}=("[^"]*"|\\S+)`),
    getFields: line => {
      const fields = [];
      // replace() with a fresh global copy of the regexp is used purely to
      // iterate all matches; the replacement result is discarded.
      line.replace(new RegExp(LOGFMT_REGEXP, 'g'), substring => {
        fields.push(substring.trim());
        return '';
      });
      return fields;
    },
    getLabelFromField: field => (field.match(LOGFMT_REGEXP) || [])[1],
    getValueFromField: field => (field.match(LOGFMT_REGEXP) || [])[2],
    test: line => LOGFMT_REGEXP.test(line),
  },
};
  170. export function calculateFieldStats(rows: LogRowModel[], extractor: RegExp): LogLabelStatsModel[] {
  171. // Consider only rows that satisfy the matcher
  172. const rowsWithField = rows.filter(row => extractor.test(row.entry));
  173. const rowCount = rowsWithField.length;
  174. // Get field value counts for eligible rows
  175. const countsByValue = _.countBy(rowsWithField, row => (row as LogRowModel).entry.match(extractor)[1]);
  176. const sortedCounts = _.chain(countsByValue)
  177. .map((count, value) => ({ count, value, proportion: count / rowCount }))
  178. .sortBy('count')
  179. .reverse()
  180. .value();
  181. return sortedCounts;
  182. }
  183. export function calculateLogsLabelStats(rows: LogRowModel[], label: string): LogLabelStatsModel[] {
  184. // Consider only rows that have the given label
  185. const rowsWithLabel = rows.filter(row => row.labels[label] !== undefined);
  186. const rowCount = rowsWithLabel.length;
  187. // Get label value counts for eligible rows
  188. const countsByValue = _.countBy(rowsWithLabel, row => (row as LogRowModel).labels[label]);
  189. const sortedCounts = _.chain(countsByValue)
  190. .map((count, value) => ({ count, value, proportion: count / rowCount }))
  191. .sortBy('count')
  192. .reverse()
  193. .value();
  194. return sortedCounts;
  195. }
  196. const isoDateRegexp = /\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-6]\d[,\.]\d+([+-][0-2]\d:[0-5]\d|Z)/g;
  197. function isDuplicateRow(row: LogRowModel, other: LogRowModel, strategy: LogsDedupStrategy): boolean {
  198. switch (strategy) {
  199. case LogsDedupStrategy.exact:
  200. // Exact still strips dates
  201. return row.entry.replace(isoDateRegexp, '') === other.entry.replace(isoDateRegexp, '');
  202. case LogsDedupStrategy.numbers:
  203. return row.entry.replace(/\d/g, '') === other.entry.replace(/\d/g, '');
  204. case LogsDedupStrategy.signature:
  205. return row.entry.replace(/\w/g, '') === other.entry.replace(/\w/g, '');
  206. default:
  207. return false;
  208. }
  209. }
  210. export function dedupLogRows(logs: LogsModel, strategy: LogsDedupStrategy): LogsModel {
  211. if (strategy === LogsDedupStrategy.none) {
  212. return logs;
  213. }
  214. const dedupedRows = logs.rows.reduce((result: LogRowModel[], row: LogRowModel, index, list) => {
  215. const rowCopy = { ...row };
  216. const previous = result[result.length - 1];
  217. if (index > 0 && isDuplicateRow(row, previous, strategy)) {
  218. previous.duplicates++;
  219. } else {
  220. rowCopy.duplicates = 0;
  221. result.push(rowCopy);
  222. }
  223. return result;
  224. }, []);
  225. return {
  226. ...logs,
  227. rows: dedupedRows,
  228. };
  229. }
  230. export function getParser(line: string): LogsParser {
  231. let parser;
  232. try {
  233. if (LogsParsers.JSON.test(line)) {
  234. parser = LogsParsers.JSON;
  235. }
  236. } catch (error) {}
  237. if (!parser && LogsParsers.logfmt.test(line)) {
  238. parser = LogsParsers.logfmt;
  239. }
  240. return parser;
  241. }
  242. export function filterLogLevels(logs: LogsModel, hiddenLogLevels: Set<LogLevel>): LogsModel {
  243. if (hiddenLogLevels.size === 0) {
  244. return logs;
  245. }
  246. const filteredRows = logs.rows.reduce((result: LogRowModel[], row: LogRowModel, index, list) => {
  247. if (!hiddenLogLevels.has(row.logLevel)) {
  248. result.push(row);
  249. }
  250. return result;
  251. }, []);
  252. return {
  253. ...logs,
  254. rows: filteredRows,
  255. };
  256. }
/**
 * Buckets log rows into one count-over-time series per log level, for the
 * log-volume graph.
 *
 * @param rows Log rows to aggregate (need not be sorted by time).
 * @param intervalMs Query interval; bucket width is 10x this (see note below).
 * @returns One series per level with [count, timeMs] datapoints sorted by time.
 */
export function makeSeriesForLogs(rows: LogRowModel[], intervalMs: number): TimeSeries[] {
  // currently interval is rangeMs / resolution, which is too low for showing series as bars.
  // need at least 10px per bucket, so we multiply interval by 10. Should be solved higher up the chain
  // when executing queries & interval calculated and not here but this is a temporary fix.
  // intervalMs = intervalMs * 10;
  // Graph time series by log level
  const seriesByLevel = {};
  const bucketSize = intervalMs * 10;
  const seriesList = [];
  for (const row of rows) {
    let series = seriesByLevel[row.logLevel];
    if (!series) {
      // First row of this level: lazily create its series
      seriesByLevel[row.logLevel] = series = {
        lastTs: null,
        datapoints: [],
        alias: row.logLevel,
        color: LogLevelColor[row.logLevel],
      };
      seriesList.push(series);
    }
    // align time to bucket size
    const time = Math.round(row.timeEpochMs / bucketSize) * bucketSize;
    // Entry for time
    if (time === series.lastTs) {
      // Same bucket as this level's previous datapoint: bump its count
      series.datapoints[series.datapoints.length - 1][0]++;
    } else {
      series.datapoints.push([1, time]);
      series.lastTs = time;
    }
    // add zero to other levels to aid stacking so each level series has same number of points
    for (const other of seriesList) {
      if (other !== series && other.lastTs !== time) {
        other.datapoints.push([0, time]);
        other.lastTs = time;
      }
    }
  }
  // NOTE(review): the zero-filling above can append points out of time order
  // when rows are unsorted, hence the per-series sort before returning.
  return seriesList.map(series => {
    series.datapoints.sort((a, b) => {
      return a[1] - b[1];
    });
    return {
      datapoints: series.datapoints,
      target: series.alias,
      alias: series.alias,
      color: series.color,
    };
  });
}