// logs_model.test.ts
  1. import {
  2. calculateFieldStats,
  3. calculateLogsLabelStats,
  4. dedupLogRows,
  5. getParser,
  6. LogsDedupStrategy,
  7. LogsModel,
  8. LogsParsers,
  9. seriesDataToLogsModel,
  10. LogsMetaKind,
  11. } from '../logs_model';
  12. import { SeriesData, FieldType } from '@grafana/ui';
  13. describe('dedupLogRows()', () => {
  14. test('should return rows as is when dedup is set to none', () => {
  15. const logs = {
  16. rows: [
  17. {
  18. entry: 'WARN test 1.23 on [xxx]',
  19. },
  20. {
  21. entry: 'WARN test 1.23 on [xxx]',
  22. },
  23. ],
  24. };
  25. expect(dedupLogRows(logs as LogsModel, LogsDedupStrategy.none).rows).toMatchObject(logs.rows);
  26. });
  27. test('should dedup on exact matches', () => {
  28. const logs = {
  29. rows: [
  30. {
  31. entry: 'WARN test 1.23 on [xxx]',
  32. },
  33. {
  34. entry: 'WARN test 1.23 on [xxx]',
  35. },
  36. {
  37. entry: 'INFO test 2.44 on [xxx]',
  38. },
  39. {
  40. entry: 'WARN test 1.23 on [xxx]',
  41. },
  42. ],
  43. };
  44. expect(dedupLogRows(logs as LogsModel, LogsDedupStrategy.exact).rows).toEqual([
  45. {
  46. duplicates: 1,
  47. entry: 'WARN test 1.23 on [xxx]',
  48. },
  49. {
  50. duplicates: 0,
  51. entry: 'INFO test 2.44 on [xxx]',
  52. },
  53. {
  54. duplicates: 0,
  55. entry: 'WARN test 1.23 on [xxx]',
  56. },
  57. ]);
  58. });
  59. test('should dedup on number matches', () => {
  60. const logs = {
  61. rows: [
  62. {
  63. entry: 'WARN test 1.2323423 on [xxx]',
  64. },
  65. {
  66. entry: 'WARN test 1.23 on [xxx]',
  67. },
  68. {
  69. entry: 'INFO test 2.44 on [xxx]',
  70. },
  71. {
  72. entry: 'WARN test 1.23 on [xxx]',
  73. },
  74. ],
  75. };
  76. expect(dedupLogRows(logs as LogsModel, LogsDedupStrategy.numbers).rows).toEqual([
  77. {
  78. duplicates: 1,
  79. entry: 'WARN test 1.2323423 on [xxx]',
  80. },
  81. {
  82. duplicates: 0,
  83. entry: 'INFO test 2.44 on [xxx]',
  84. },
  85. {
  86. duplicates: 0,
  87. entry: 'WARN test 1.23 on [xxx]',
  88. },
  89. ]);
  90. });
  91. test('should dedup on signature matches', () => {
  92. const logs = {
  93. rows: [
  94. {
  95. entry: 'WARN test 1.2323423 on [xxx]',
  96. },
  97. {
  98. entry: 'WARN test 1.23 on [xxx]',
  99. },
  100. {
  101. entry: 'INFO test 2.44 on [xxx]',
  102. },
  103. {
  104. entry: 'WARN test 1.23 on [xxx]',
  105. },
  106. ],
  107. };
  108. expect(dedupLogRows(logs as LogsModel, LogsDedupStrategy.signature).rows).toEqual([
  109. {
  110. duplicates: 3,
  111. entry: 'WARN test 1.2323423 on [xxx]',
  112. },
  113. ]);
  114. });
  115. test('should return to non-deduped state on same log result', () => {
  116. const logs = {
  117. rows: [
  118. {
  119. entry: 'INFO 123',
  120. },
  121. {
  122. entry: 'WARN 123',
  123. },
  124. {
  125. entry: 'WARN 123',
  126. },
  127. ],
  128. };
  129. expect(dedupLogRows(logs as LogsModel, LogsDedupStrategy.exact).rows).toEqual([
  130. {
  131. duplicates: 0,
  132. entry: 'INFO 123',
  133. },
  134. {
  135. duplicates: 1,
  136. entry: 'WARN 123',
  137. },
  138. ]);
  139. expect(dedupLogRows(logs as LogsModel, LogsDedupStrategy.none).rows).toEqual(logs.rows);
  140. });
  141. });
  142. describe('calculateFieldStats()', () => {
  143. test('should return no stats for empty rows', () => {
  144. expect(calculateFieldStats([], /foo=(.*)/)).toEqual([]);
  145. });
  146. test('should return no stats if extractor does not match', () => {
  147. const rows = [
  148. {
  149. entry: 'foo=bar',
  150. },
  151. ];
  152. expect(calculateFieldStats(rows as any, /baz=(.*)/)).toEqual([]);
  153. });
  154. test('should return stats for found field', () => {
  155. const rows = [
  156. {
  157. entry: 'foo="42 + 1"',
  158. },
  159. {
  160. entry: 'foo=503 baz=foo',
  161. },
  162. {
  163. entry: 'foo="42 + 1"',
  164. },
  165. {
  166. entry: 't=2018-12-05T07:44:59+0000 foo=503',
  167. },
  168. ];
  169. expect(calculateFieldStats(rows as any, /foo=("[^"]*"|\S+)/)).toMatchObject([
  170. {
  171. value: '"42 + 1"',
  172. count: 2,
  173. },
  174. {
  175. value: '503',
  176. count: 2,
  177. },
  178. ]);
  179. });
  180. });
  181. describe('calculateLogsLabelStats()', () => {
  182. test('should return no stats for empty rows', () => {
  183. expect(calculateLogsLabelStats([], '')).toEqual([]);
  184. });
  185. test('should return no stats of label is not found', () => {
  186. const rows = [
  187. {
  188. entry: 'foo 1',
  189. labels: {
  190. foo: 'bar',
  191. },
  192. },
  193. ];
  194. expect(calculateLogsLabelStats(rows as any, 'baz')).toEqual([]);
  195. });
  196. test('should return stats for found labels', () => {
  197. const rows = [
  198. {
  199. entry: 'foo 1',
  200. labels: {
  201. foo: 'bar',
  202. },
  203. },
  204. {
  205. entry: 'foo 0',
  206. labels: {
  207. foo: 'xxx',
  208. },
  209. },
  210. {
  211. entry: 'foo 2',
  212. labels: {
  213. foo: 'bar',
  214. },
  215. },
  216. ];
  217. expect(calculateLogsLabelStats(rows as any, 'foo')).toMatchObject([
  218. {
  219. value: 'bar',
  220. count: 2,
  221. },
  222. {
  223. value: 'xxx',
  224. count: 1,
  225. },
  226. ]);
  227. });
  228. });
  229. describe('getParser()', () => {
  230. test('should return no parser on empty line', () => {
  231. expect(getParser('')).toBeUndefined();
  232. });
  233. test('should return no parser on unknown line pattern', () => {
  234. expect(getParser('To Be or not to be')).toBeUndefined();
  235. });
  236. test('should return logfmt parser on key value patterns', () => {
  237. expect(getParser('foo=bar baz="41 + 1')).toEqual(LogsParsers.logfmt);
  238. });
  239. test('should return JSON parser on JSON log lines', () => {
  240. // TODO implement other JSON value types than string
  241. expect(getParser('{"foo": "bar", "baz": "41 + 1"}')).toEqual(LogsParsers.JSON);
  242. });
  243. });
  244. describe('LogsParsers', () => {
  245. describe('logfmt', () => {
  246. const parser = LogsParsers.logfmt;
  247. test('should detect format', () => {
  248. expect(parser.test('foo')).toBeFalsy();
  249. expect(parser.test('foo=bar')).toBeTruthy();
  250. });
  251. test('should return parsed fields', () => {
  252. expect(parser.getFields('foo=bar baz="42 + 1"')).toEqual(['foo=bar', 'baz="42 + 1"']);
  253. });
  254. test('should return label for field', () => {
  255. expect(parser.getLabelFromField('foo=bar')).toBe('foo');
  256. });
  257. test('should return value for field', () => {
  258. expect(parser.getValueFromField('foo=bar')).toBe('bar');
  259. });
  260. test('should build a valid value matcher', () => {
  261. const matcher = parser.buildMatcher('foo');
  262. const match = 'foo=bar'.match(matcher);
  263. expect(match).toBeDefined();
  264. expect(match[1]).toBe('bar');
  265. });
  266. });
  267. describe('JSON', () => {
  268. const parser = LogsParsers.JSON;
  269. test('should detect format', () => {
  270. expect(parser.test('foo')).toBeFalsy();
  271. expect(parser.test('{"foo":"bar"}')).toBeTruthy();
  272. });
  273. test('should return parsed fields', () => {
  274. expect(parser.getFields('{ "foo" : "bar", "baz" : 42 }')).toEqual(['"foo" : "bar"', '"baz" : 42']);
  275. });
  276. test('should return parsed fields for nested quotes', () => {
  277. expect(parser.getFields(`{"foo":"bar: '[value=\\"42\\"]'"}`)).toEqual([`"foo":"bar: '[value=\\"42\\"]'"`]);
  278. });
  279. test('should return label for field', () => {
  280. expect(parser.getLabelFromField('"foo" : "bar"')).toBe('foo');
  281. });
  282. test('should return value for field', () => {
  283. expect(parser.getValueFromField('"foo" : "bar"')).toBe('"bar"');
  284. expect(parser.getValueFromField('"foo" : 42')).toBe('42');
  285. expect(parser.getValueFromField('"foo" : 42.1')).toBe('42.1');
  286. });
  287. test('should build a valid value matcher for strings', () => {
  288. const matcher = parser.buildMatcher('foo');
  289. const match = '{"foo":"bar"}'.match(matcher);
  290. expect(match).toBeDefined();
  291. expect(match[1]).toBe('bar');
  292. });
  293. test('should build a valid value matcher for integers', () => {
  294. const matcher = parser.buildMatcher('foo');
  295. const match = '{"foo":42.1}'.match(matcher);
  296. expect(match).toBeDefined();
  297. expect(match[1]).toBe('42.1');
  298. });
  299. });
  300. });
  301. describe('seriesDataToLogsModel', () => {
  302. it('given empty series should return undefined', () => {
  303. expect(seriesDataToLogsModel([] as SeriesData[], 0)).toBeUndefined();
  304. });
  305. it('given series without correct series name should not be processed', () => {
  306. const series: SeriesData[] = [
  307. {
  308. fields: [],
  309. rows: [],
  310. },
  311. ];
  312. expect(seriesDataToLogsModel(series, 0)).toBeUndefined();
  313. });
  314. it('given series without a time field should not be processed', () => {
  315. const series: SeriesData[] = [
  316. {
  317. fields: [
  318. {
  319. name: 'message',
  320. type: FieldType.string,
  321. },
  322. ],
  323. rows: [],
  324. },
  325. ];
  326. expect(seriesDataToLogsModel(series, 0)).toBeUndefined();
  327. });
  328. it('given series without a string field should not be processed', () => {
  329. const series: SeriesData[] = [
  330. {
  331. fields: [
  332. {
  333. name: 'time',
  334. type: FieldType.time,
  335. },
  336. ],
  337. rows: [],
  338. },
  339. ];
  340. expect(seriesDataToLogsModel(series, 0)).toBeUndefined();
  341. });
  342. it('given one series should return expected logs model', () => {
  343. const series: SeriesData[] = [
  344. {
  345. labels: {
  346. filename: '/var/log/grafana/grafana.log',
  347. job: 'grafana',
  348. },
  349. fields: [
  350. {
  351. name: 'time',
  352. type: FieldType.time,
  353. },
  354. {
  355. name: 'message',
  356. type: FieldType.string,
  357. },
  358. ],
  359. rows: [
  360. [
  361. '2019-04-26T09:28:11.352440161Z',
  362. 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
  363. ],
  364. [
  365. '2019-04-26T14:42:50.991981292Z',
  366. 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
  367. ],
  368. ],
  369. meta: {
  370. limit: 1000,
  371. },
  372. },
  373. ];
  374. const logsModel = seriesDataToLogsModel(series, 0);
  375. expect(logsModel.hasUniqueLabels).toBeFalsy();
  376. expect(logsModel.rows).toHaveLength(2);
  377. expect(logsModel.rows).toMatchObject([
  378. {
  379. timestamp: '2019-04-26T14:42:50.991981292Z',
  380. entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
  381. labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
  382. logLevel: 'error',
  383. uniqueLabels: {},
  384. },
  385. {
  386. timestamp: '2019-04-26T09:28:11.352440161Z',
  387. entry: 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
  388. labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
  389. logLevel: 'info',
  390. uniqueLabels: {},
  391. },
  392. ]);
  393. expect(logsModel.series).toHaveLength(2);
  394. expect(logsModel.meta).toHaveLength(2);
  395. expect(logsModel.meta[0]).toMatchObject({
  396. label: 'Common labels',
  397. value: series[0].labels,
  398. kind: LogsMetaKind.LabelsMap,
  399. });
  400. expect(logsModel.meta[1]).toMatchObject({
  401. label: 'Limit',
  402. value: `1000 (2 returned)`,
  403. kind: LogsMetaKind.String,
  404. });
  405. });
  406. it('given one series without labels should return expected logs model', () => {
  407. const series: SeriesData[] = [
  408. {
  409. fields: [
  410. {
  411. name: 'time',
  412. type: FieldType.time,
  413. },
  414. {
  415. name: 'message',
  416. type: FieldType.string,
  417. },
  418. ],
  419. rows: [['1970-01-01T00:00:01Z', 'WARN boooo']],
  420. },
  421. ];
  422. const logsModel = seriesDataToLogsModel(series, 0);
  423. expect(logsModel.rows).toHaveLength(1);
  424. expect(logsModel.rows).toMatchObject([
  425. {
  426. entry: 'WARN boooo',
  427. labels: undefined,
  428. logLevel: 'warning',
  429. uniqueLabels: {},
  430. },
  431. ]);
  432. });
  433. it('given multiple series should return expected logs model', () => {
  434. const series: SeriesData[] = [
  435. {
  436. labels: {
  437. foo: 'bar',
  438. baz: '1',
  439. },
  440. fields: [
  441. {
  442. name: 'ts',
  443. type: FieldType.time,
  444. },
  445. {
  446. name: 'line',
  447. type: FieldType.string,
  448. },
  449. ],
  450. rows: [['1970-01-01T00:00:01Z', 'WARN boooo']],
  451. },
  452. {
  453. name: 'logs',
  454. labels: {
  455. foo: 'bar',
  456. baz: '2',
  457. },
  458. fields: [
  459. {
  460. name: 'time',
  461. type: FieldType.time,
  462. },
  463. {
  464. name: 'message',
  465. type: FieldType.string,
  466. },
  467. ],
  468. rows: [['1970-01-01T00:00:00Z', 'INFO 1'], ['1970-01-01T00:00:02Z', 'INFO 2']],
  469. },
  470. ];
  471. const logsModel = seriesDataToLogsModel(series, 0);
  472. expect(logsModel.hasUniqueLabels).toBeTruthy();
  473. expect(logsModel.rows).toHaveLength(3);
  474. expect(logsModel.rows).toMatchObject([
  475. {
  476. entry: 'INFO 2',
  477. labels: { foo: 'bar', baz: '2' },
  478. logLevel: 'info',
  479. uniqueLabels: { baz: '2' },
  480. },
  481. {
  482. entry: 'WARN boooo',
  483. labels: { foo: 'bar', baz: '1' },
  484. logLevel: 'warning',
  485. uniqueLabels: { baz: '1' },
  486. },
  487. {
  488. entry: 'INFO 1',
  489. labels: { foo: 'bar', baz: '2' },
  490. logLevel: 'info',
  491. uniqueLabels: { baz: '2' },
  492. },
  493. ]);
  494. expect(logsModel.series).toHaveLength(2);
  495. expect(logsModel.meta).toHaveLength(1);
  496. expect(logsModel.meta[0]).toMatchObject({
  497. label: 'Common labels',
  498. value: {
  499. foo: 'bar',
  500. },
  501. kind: LogsMetaKind.LabelsMap,
  502. });
  503. });
  504. });