// logs_model.test.ts
  1. import { DataFrame, FieldType, LogsModel, LogsMetaKind, LogsDedupStrategy, LogLevel } from '@grafana/data';
  2. import {
  3. dedupLogRows,
  4. calculateFieldStats,
  5. calculateLogsLabelStats,
  6. getParser,
  7. LogsParsers,
  8. dataFrameToLogsModel,
  9. } from '../logs_model';
  10. describe('dedupLogRows()', () => {
  11. test('should return rows as is when dedup is set to none', () => {
  12. const logs = {
  13. rows: [
  14. {
  15. entry: 'WARN test 1.23 on [xxx]',
  16. },
  17. {
  18. entry: 'WARN test 1.23 on [xxx]',
  19. },
  20. ],
  21. };
  22. expect(dedupLogRows(logs as LogsModel, LogsDedupStrategy.none).rows).toMatchObject(logs.rows);
  23. });
  24. test('should dedup on exact matches', () => {
  25. const logs = {
  26. rows: [
  27. {
  28. entry: 'WARN test 1.23 on [xxx]',
  29. },
  30. {
  31. entry: 'WARN test 1.23 on [xxx]',
  32. },
  33. {
  34. entry: 'INFO test 2.44 on [xxx]',
  35. },
  36. {
  37. entry: 'WARN test 1.23 on [xxx]',
  38. },
  39. ],
  40. };
  41. expect(dedupLogRows(logs as LogsModel, LogsDedupStrategy.exact).rows).toEqual([
  42. {
  43. duplicates: 1,
  44. entry: 'WARN test 1.23 on [xxx]',
  45. },
  46. {
  47. duplicates: 0,
  48. entry: 'INFO test 2.44 on [xxx]',
  49. },
  50. {
  51. duplicates: 0,
  52. entry: 'WARN test 1.23 on [xxx]',
  53. },
  54. ]);
  55. });
  56. test('should dedup on number matches', () => {
  57. const logs = {
  58. rows: [
  59. {
  60. entry: 'WARN test 1.2323423 on [xxx]',
  61. },
  62. {
  63. entry: 'WARN test 1.23 on [xxx]',
  64. },
  65. {
  66. entry: 'INFO test 2.44 on [xxx]',
  67. },
  68. {
  69. entry: 'WARN test 1.23 on [xxx]',
  70. },
  71. ],
  72. };
  73. expect(dedupLogRows(logs as LogsModel, LogsDedupStrategy.numbers).rows).toEqual([
  74. {
  75. duplicates: 1,
  76. entry: 'WARN test 1.2323423 on [xxx]',
  77. },
  78. {
  79. duplicates: 0,
  80. entry: 'INFO test 2.44 on [xxx]',
  81. },
  82. {
  83. duplicates: 0,
  84. entry: 'WARN test 1.23 on [xxx]',
  85. },
  86. ]);
  87. });
  88. test('should dedup on signature matches', () => {
  89. const logs = {
  90. rows: [
  91. {
  92. entry: 'WARN test 1.2323423 on [xxx]',
  93. },
  94. {
  95. entry: 'WARN test 1.23 on [xxx]',
  96. },
  97. {
  98. entry: 'INFO test 2.44 on [xxx]',
  99. },
  100. {
  101. entry: 'WARN test 1.23 on [xxx]',
  102. },
  103. ],
  104. };
  105. expect(dedupLogRows(logs as LogsModel, LogsDedupStrategy.signature).rows).toEqual([
  106. {
  107. duplicates: 3,
  108. entry: 'WARN test 1.2323423 on [xxx]',
  109. },
  110. ]);
  111. });
  112. test('should return to non-deduped state on same log result', () => {
  113. const logs = {
  114. rows: [
  115. {
  116. entry: 'INFO 123',
  117. },
  118. {
  119. entry: 'WARN 123',
  120. },
  121. {
  122. entry: 'WARN 123',
  123. },
  124. ],
  125. };
  126. expect(dedupLogRows(logs as LogsModel, LogsDedupStrategy.exact).rows).toEqual([
  127. {
  128. duplicates: 0,
  129. entry: 'INFO 123',
  130. },
  131. {
  132. duplicates: 1,
  133. entry: 'WARN 123',
  134. },
  135. ]);
  136. expect(dedupLogRows(logs as LogsModel, LogsDedupStrategy.none).rows).toEqual(logs.rows);
  137. });
  138. });
  139. describe('calculateFieldStats()', () => {
  140. test('should return no stats for empty rows', () => {
  141. expect(calculateFieldStats([], /foo=(.*)/)).toEqual([]);
  142. });
  143. test('should return no stats if extractor does not match', () => {
  144. const rows = [
  145. {
  146. entry: 'foo=bar',
  147. },
  148. ];
  149. expect(calculateFieldStats(rows as any, /baz=(.*)/)).toEqual([]);
  150. });
  151. test('should return stats for found field', () => {
  152. const rows = [
  153. {
  154. entry: 'foo="42 + 1"',
  155. },
  156. {
  157. entry: 'foo=503 baz=foo',
  158. },
  159. {
  160. entry: 'foo="42 + 1"',
  161. },
  162. {
  163. entry: 't=2018-12-05T07:44:59+0000 foo=503',
  164. },
  165. ];
  166. expect(calculateFieldStats(rows as any, /foo=("[^"]*"|\S+)/)).toMatchObject([
  167. {
  168. value: '"42 + 1"',
  169. count: 2,
  170. },
  171. {
  172. value: '503',
  173. count: 2,
  174. },
  175. ]);
  176. });
  177. });
  178. describe('calculateLogsLabelStats()', () => {
  179. test('should return no stats for empty rows', () => {
  180. expect(calculateLogsLabelStats([], '')).toEqual([]);
  181. });
  182. test('should return no stats of label is not found', () => {
  183. const rows = [
  184. {
  185. entry: 'foo 1',
  186. labels: {
  187. foo: 'bar',
  188. },
  189. },
  190. ];
  191. expect(calculateLogsLabelStats(rows as any, 'baz')).toEqual([]);
  192. });
  193. test('should return stats for found labels', () => {
  194. const rows = [
  195. {
  196. entry: 'foo 1',
  197. labels: {
  198. foo: 'bar',
  199. },
  200. },
  201. {
  202. entry: 'foo 0',
  203. labels: {
  204. foo: 'xxx',
  205. },
  206. },
  207. {
  208. entry: 'foo 2',
  209. labels: {
  210. foo: 'bar',
  211. },
  212. },
  213. ];
  214. expect(calculateLogsLabelStats(rows as any, 'foo')).toMatchObject([
  215. {
  216. value: 'bar',
  217. count: 2,
  218. },
  219. {
  220. value: 'xxx',
  221. count: 1,
  222. },
  223. ]);
  224. });
  225. });
  226. describe('getParser()', () => {
  227. test('should return no parser on empty line', () => {
  228. expect(getParser('')).toBeUndefined();
  229. });
  230. test('should return no parser on unknown line pattern', () => {
  231. expect(getParser('To Be or not to be')).toBeUndefined();
  232. });
  233. test('should return logfmt parser on key value patterns', () => {
  234. expect(getParser('foo=bar baz="41 + 1')).toEqual(LogsParsers.logfmt);
  235. });
  236. test('should return JSON parser on JSON log lines', () => {
  237. // TODO implement other JSON value types than string
  238. expect(getParser('{"foo": "bar", "baz": "41 + 1"}')).toEqual(LogsParsers.JSON);
  239. });
  240. });
  241. describe('LogsParsers', () => {
  242. describe('logfmt', () => {
  243. const parser = LogsParsers.logfmt;
  244. test('should detect format', () => {
  245. expect(parser.test('foo')).toBeFalsy();
  246. expect(parser.test('foo=bar')).toBeTruthy();
  247. });
  248. test('should return parsed fields', () => {
  249. expect(parser.getFields('foo=bar baz="42 + 1"')).toEqual(['foo=bar', 'baz="42 + 1"']);
  250. });
  251. test('should return label for field', () => {
  252. expect(parser.getLabelFromField('foo=bar')).toBe('foo');
  253. });
  254. test('should return value for field', () => {
  255. expect(parser.getValueFromField('foo=bar')).toBe('bar');
  256. });
  257. test('should build a valid value matcher', () => {
  258. const matcher = parser.buildMatcher('foo');
  259. const match = 'foo=bar'.match(matcher);
  260. expect(match).toBeDefined();
  261. expect(match[1]).toBe('bar');
  262. });
  263. });
  264. describe('JSON', () => {
  265. const parser = LogsParsers.JSON;
  266. test('should detect format', () => {
  267. expect(parser.test('foo')).toBeFalsy();
  268. expect(parser.test('{"foo":"bar"}')).toBeTruthy();
  269. });
  270. test('should return parsed fields', () => {
  271. expect(parser.getFields('{ "foo" : "bar", "baz" : 42 }')).toEqual(['"foo" : "bar"', '"baz" : 42']);
  272. });
  273. test('should return parsed fields for nested quotes', () => {
  274. expect(parser.getFields(`{"foo":"bar: '[value=\\"42\\"]'"}`)).toEqual([`"foo":"bar: '[value=\\"42\\"]'"`]);
  275. });
  276. test('should return label for field', () => {
  277. expect(parser.getLabelFromField('"foo" : "bar"')).toBe('foo');
  278. });
  279. test('should return value for field', () => {
  280. expect(parser.getValueFromField('"foo" : "bar"')).toBe('"bar"');
  281. expect(parser.getValueFromField('"foo" : 42')).toBe('42');
  282. expect(parser.getValueFromField('"foo" : 42.1')).toBe('42.1');
  283. });
  284. test('should build a valid value matcher for strings', () => {
  285. const matcher = parser.buildMatcher('foo');
  286. const match = '{"foo":"bar"}'.match(matcher);
  287. expect(match).toBeDefined();
  288. expect(match[1]).toBe('bar');
  289. });
  290. test('should build a valid value matcher for integers', () => {
  291. const matcher = parser.buildMatcher('foo');
  292. const match = '{"foo":42.1}'.match(matcher);
  293. expect(match).toBeDefined();
  294. expect(match[1]).toBe('42.1');
  295. });
  296. });
  297. });
  298. const emptyLogsModel: any = {
  299. hasUniqueLabels: false,
  300. rows: [],
  301. meta: [],
  302. series: [],
  303. };
  304. describe('dataFrameToLogsModel', () => {
  305. it('given empty series should return empty logs model', () => {
  306. expect(dataFrameToLogsModel([] as DataFrame[], 0)).toMatchObject(emptyLogsModel);
  307. });
  308. it('given series without correct series name should return empty logs model', () => {
  309. const series: DataFrame[] = [
  310. {
  311. fields: [],
  312. rows: [],
  313. },
  314. ];
  315. expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
  316. });
  317. it('given series without a time field should return empty logs model', () => {
  318. const series: DataFrame[] = [
  319. {
  320. fields: [
  321. {
  322. name: 'message',
  323. type: FieldType.string,
  324. },
  325. ],
  326. rows: [],
  327. },
  328. ];
  329. expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
  330. });
  331. it('given series without a string field should return empty logs model', () => {
  332. const series: DataFrame[] = [
  333. {
  334. fields: [
  335. {
  336. name: 'time',
  337. type: FieldType.time,
  338. },
  339. ],
  340. rows: [],
  341. },
  342. ];
  343. expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
  344. });
  345. it('given one series should return expected logs model', () => {
  346. const series: DataFrame[] = [
  347. {
  348. labels: {
  349. filename: '/var/log/grafana/grafana.log',
  350. job: 'grafana',
  351. },
  352. fields: [
  353. {
  354. name: 'time',
  355. type: FieldType.time,
  356. },
  357. {
  358. name: 'message',
  359. type: FieldType.string,
  360. },
  361. ],
  362. rows: [
  363. [
  364. '2019-04-26T09:28:11.352440161Z',
  365. 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
  366. ],
  367. [
  368. '2019-04-26T14:42:50.991981292Z',
  369. 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
  370. ],
  371. ],
  372. meta: {
  373. limit: 1000,
  374. },
  375. },
  376. ];
  377. const logsModel = dataFrameToLogsModel(series, 0);
  378. expect(logsModel.hasUniqueLabels).toBeFalsy();
  379. expect(logsModel.rows).toHaveLength(2);
  380. expect(logsModel.rows).toMatchObject([
  381. {
  382. timestamp: '2019-04-26T09:28:11.352440161Z',
  383. entry: 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
  384. labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
  385. logLevel: 'info',
  386. uniqueLabels: {},
  387. },
  388. {
  389. timestamp: '2019-04-26T14:42:50.991981292Z',
  390. entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
  391. labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
  392. logLevel: 'error',
  393. uniqueLabels: {},
  394. },
  395. ]);
  396. expect(logsModel.series).toHaveLength(2);
  397. expect(logsModel.meta).toHaveLength(2);
  398. expect(logsModel.meta[0]).toMatchObject({
  399. label: 'Common labels',
  400. value: series[0].labels,
  401. kind: LogsMetaKind.LabelsMap,
  402. });
  403. expect(logsModel.meta[1]).toMatchObject({
  404. label: 'Limit',
  405. value: `1000 (2 returned)`,
  406. kind: LogsMetaKind.String,
  407. });
  408. });
  409. it('given one series without labels should return expected logs model', () => {
  410. const series: DataFrame[] = [
  411. {
  412. fields: [
  413. {
  414. name: 'time',
  415. type: FieldType.time,
  416. },
  417. {
  418. name: 'message',
  419. type: FieldType.string,
  420. },
  421. {
  422. name: 'level',
  423. type: FieldType.string,
  424. },
  425. ],
  426. rows: [['1970-01-01T00:00:01Z', 'WARN boooo', 'dbug']],
  427. },
  428. ];
  429. const logsModel = dataFrameToLogsModel(series, 0);
  430. expect(logsModel.rows).toHaveLength(1);
  431. expect(logsModel.rows).toMatchObject([
  432. {
  433. entry: 'WARN boooo',
  434. labels: undefined,
  435. logLevel: LogLevel.debug,
  436. uniqueLabels: {},
  437. },
  438. ]);
  439. });
  440. it('given multiple series should return expected logs model', () => {
  441. const series: DataFrame[] = [
  442. {
  443. labels: {
  444. foo: 'bar',
  445. baz: '1',
  446. level: 'dbug',
  447. },
  448. fields: [
  449. {
  450. name: 'ts',
  451. type: FieldType.time,
  452. },
  453. {
  454. name: 'line',
  455. type: FieldType.string,
  456. },
  457. ],
  458. rows: [['1970-01-01T00:00:01Z', 'WARN boooo']],
  459. },
  460. {
  461. name: 'logs',
  462. labels: {
  463. foo: 'bar',
  464. baz: '2',
  465. level: 'err',
  466. },
  467. fields: [
  468. {
  469. name: 'time',
  470. type: FieldType.time,
  471. },
  472. {
  473. name: 'message',
  474. type: FieldType.string,
  475. },
  476. ],
  477. rows: [['1970-01-01T00:00:00Z', 'INFO 1'], ['1970-01-01T00:00:02Z', 'INFO 2']],
  478. },
  479. ];
  480. const logsModel = dataFrameToLogsModel(series, 0);
  481. expect(logsModel.hasUniqueLabels).toBeTruthy();
  482. expect(logsModel.rows).toHaveLength(3);
  483. expect(logsModel.rows).toMatchObject([
  484. {
  485. entry: 'WARN boooo',
  486. labels: { foo: 'bar', baz: '1' },
  487. logLevel: LogLevel.debug,
  488. uniqueLabels: { baz: '1' },
  489. },
  490. {
  491. entry: 'INFO 1',
  492. labels: { foo: 'bar', baz: '2' },
  493. logLevel: LogLevel.error,
  494. uniqueLabels: { baz: '2' },
  495. },
  496. {
  497. entry: 'INFO 2',
  498. labels: { foo: 'bar', baz: '2' },
  499. logLevel: LogLevel.error,
  500. uniqueLabels: { baz: '2' },
  501. },
  502. ]);
  503. expect(logsModel.series).toHaveLength(2);
  504. expect(logsModel.meta).toHaveLength(1);
  505. expect(logsModel.meta[0]).toMatchObject({
  506. label: 'Common labels',
  507. value: {
  508. foo: 'bar',
  509. },
  510. kind: LogsMetaKind.LabelsMap,
  511. });
  512. });
  513. });