logs_model.test.ts

import {
  DataFrame,
  FieldType,
  LogsModel,
  LogsMetaKind,
  LogsDedupStrategy,
  LogLevel,
  DataFrameHelper,
  toDataFrame,
} from '@grafana/data';
import {
  dedupLogRows,
  calculateFieldStats,
  calculateLogsLabelStats,
  getParser,
  LogsParsers,
  dataFrameToLogsModel,
} from '../logs_model';
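
// Covers the logs model helpers: row deduplication, field/label stats,
// log-line parsers (logfmt and JSON), and DataFrame-to-LogsModel conversion.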
describe('dedupLogRows()', () => {
  test('should return rows as is when dedup is set to none', () => {
    const logs = {
      rows: [
        {
          entry: 'WARN test 1.23 on [xxx]',
        },
        {
          entry: 'WARN test 1.23 on [xxx]',
        },
      ],
    };
    expect(dedupLogRows(logs as LogsModel, LogsDedupStrategy.none).rows).toMatchObject(logs.rows);
  });

  test('should dedup on exact matches', () => {
    const logs = {
      rows: [
        {
          entry: 'WARN test 1.23 on [xxx]',
        },
        {
          entry: 'WARN test 1.23 on [xxx]',
        },
        {
          entry: 'INFO test 2.44 on [xxx]',
        },
        {
          entry: 'WARN test 1.23 on [xxx]',
        },
      ],
    };
    expect(dedupLogRows(logs as LogsModel, LogsDedupStrategy.exact).rows).toEqual([
      {
        duplicates: 1,
        entry: 'WARN test 1.23 on [xxx]',
      },
      {
        duplicates: 0,
        entry: 'INFO test 2.44 on [xxx]',
      },
      {
        duplicates: 0,
        entry: 'WARN test 1.23 on [xxx]',
      },
    ]);
  });

  test('should dedup on number matches', () => {
    const logs = {
      rows: [
        {
          entry: 'WARN test 1.2323423 on [xxx]',
        },
        {
          entry: 'WARN test 1.23 on [xxx]',
        },
        {
          entry: 'INFO test 2.44 on [xxx]',
        },
        {
          entry: 'WARN test 1.23 on [xxx]',
        },
      ],
    };
    expect(dedupLogRows(logs as LogsModel, LogsDedupStrategy.numbers).rows).toEqual([
      {
        duplicates: 1,
        entry: 'WARN test 1.2323423 on [xxx]',
      },
      {
        duplicates: 0,
        entry: 'INFO test 2.44 on [xxx]',
      },
      {
        duplicates: 0,
        entry: 'WARN test 1.23 on [xxx]',
      },
    ]);
  });

  test('should dedup on signature matches', () => {
    const logs = {
      rows: [
        {
          entry: 'WARN test 1.2323423 on [xxx]',
        },
        {
          entry: 'WARN test 1.23 on [xxx]',
        },
        {
          entry: 'INFO test 2.44 on [xxx]',
        },
        {
          entry: 'WARN test 1.23 on [xxx]',
        },
      ],
    };
    expect(dedupLogRows(logs as LogsModel, LogsDedupStrategy.signature).rows).toEqual([
      {
        duplicates: 3,
        entry: 'WARN test 1.2323423 on [xxx]',
      },
    ]);
  });

  test('should return to non-deduped state on same log result', () => {
    const logs = {
      rows: [
        {
          entry: 'INFO 123',
        },
        {
          entry: 'WARN 123',
        },
        {
          entry: 'WARN 123',
        },
      ],
    };
    expect(dedupLogRows(logs as LogsModel, LogsDedupStrategy.exact).rows).toEqual([
      {
        duplicates: 0,
        entry: 'INFO 123',
      },
      {
        duplicates: 1,
        entry: 'WARN 123',
      },
    ]);
    expect(dedupLogRows(logs as LogsModel, LogsDedupStrategy.none).rows).toEqual(logs.rows);
  });
});

describe('calculateFieldStats()', () => {
  test('should return no stats for empty rows', () => {
    expect(calculateFieldStats([], /foo=(.*)/)).toEqual([]);
  });

  test('should return no stats if extractor does not match', () => {
    const rows = [
      {
        entry: 'foo=bar',
      },
    ];
    expect(calculateFieldStats(rows as any, /baz=(.*)/)).toEqual([]);
  });

  test('should return stats for found field', () => {
    const rows = [
      {
        entry: 'foo="42 + 1"',
      },
      {
        entry: 'foo=503 baz=foo',
      },
      {
        entry: 'foo="42 + 1"',
      },
      {
        entry: 't=2018-12-05T07:44:59+0000 foo=503',
      },
    ];
    expect(calculateFieldStats(rows as any, /foo=("[^"]*"|\S+)/)).toMatchObject([
      {
        value: '"42 + 1"',
        count: 2,
      },
      {
        value: '503',
        count: 2,
      },
    ]);
  });
});

describe('calculateLogsLabelStats()', () => {
  test('should return no stats for empty rows', () => {
    expect(calculateLogsLabelStats([], '')).toEqual([]);
  });

  test('should return no stats if label is not found', () => {
    const rows = [
      {
        entry: 'foo 1',
        labels: {
          foo: 'bar',
        },
      },
    ];
    expect(calculateLogsLabelStats(rows as any, 'baz')).toEqual([]);
  });

  test('should return stats for found labels', () => {
    const rows = [
      {
        entry: 'foo 1',
        labels: {
          foo: 'bar',
        },
      },
      {
        entry: 'foo 0',
        labels: {
          foo: 'xxx',
        },
      },
      {
        entry: 'foo 2',
        labels: {
          foo: 'bar',
        },
      },
    ];
    expect(calculateLogsLabelStats(rows as any, 'foo')).toMatchObject([
      {
        value: 'bar',
        count: 2,
      },
      {
        value: 'xxx',
        count: 1,
      },
    ]);
  });
});

describe('getParser()', () => {
  test('should return no parser on empty line', () => {
    expect(getParser('')).toBeUndefined();
  });

  test('should return no parser on unknown line pattern', () => {
    expect(getParser('To Be or not to be')).toBeUndefined();
  });

  test('should return logfmt parser on key value patterns', () => {
    expect(getParser('foo=bar baz="41 + 1')).toEqual(LogsParsers.logfmt);
  });

  test('should return JSON parser on JSON log lines', () => {
    // TODO implement other JSON value types than string
    expect(getParser('{"foo": "bar", "baz": "41 + 1"}')).toEqual(LogsParsers.JSON);
  });
});

describe('LogsParsers', () => {
  describe('logfmt', () => {
    const parser = LogsParsers.logfmt;

    test('should detect format', () => {
      expect(parser.test('foo')).toBeFalsy();
      expect(parser.test('foo=bar')).toBeTruthy();
    });

    test('should return parsed fields', () => {
      expect(parser.getFields('foo=bar baz="42 + 1"')).toEqual(['foo=bar', 'baz="42 + 1"']);
    });

    test('should return label for field', () => {
      expect(parser.getLabelFromField('foo=bar')).toBe('foo');
    });

    test('should return value for field', () => {
      expect(parser.getValueFromField('foo=bar')).toBe('bar');
    });

    test('should build a valid value matcher', () => {
      const matcher = parser.buildMatcher('foo');
      const match = 'foo=bar'.match(matcher);
      expect(match).toBeDefined();
      expect(match[1]).toBe('bar');
    });
  });

  describe('JSON', () => {
    const parser = LogsParsers.JSON;

    test('should detect format', () => {
      expect(parser.test('foo')).toBeFalsy();
      expect(parser.test('{"foo":"bar"}')).toBeTruthy();
    });

    test('should return parsed fields', () => {
      expect(parser.getFields('{ "foo" : "bar", "baz" : 42 }')).toEqual(['"foo" : "bar"', '"baz" : 42']);
    });

    test('should return parsed fields for nested quotes', () => {
      expect(parser.getFields(`{"foo":"bar: '[value=\\"42\\"]'"}`)).toEqual([`"foo":"bar: '[value=\\"42\\"]'"`]);
    });

    test('should return label for field', () => {
      expect(parser.getLabelFromField('"foo" : "bar"')).toBe('foo');
    });

    test('should return value for field', () => {
      expect(parser.getValueFromField('"foo" : "bar"')).toBe('"bar"');
      expect(parser.getValueFromField('"foo" : 42')).toBe('42');
      expect(parser.getValueFromField('"foo" : 42.1')).toBe('42.1');
    });

    test('should build a valid value matcher for strings', () => {
      const matcher = parser.buildMatcher('foo');
      const match = '{"foo":"bar"}'.match(matcher);
      expect(match).toBeDefined();
      expect(match[1]).toBe('bar');
    });

    test('should build a valid value matcher for integers', () => {
      const matcher = parser.buildMatcher('foo');
      const match = '{"foo":42.1}'.match(matcher);
      expect(match).toBeDefined();
      expect(match[1]).toBe('42.1');
    });
  });
});

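// Baseline result expected from dataFrameToLogsModel when a frame cannot be
// converted into log rows (empty series, or a missing time or string field).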
const emptyLogsModel: any = {
  hasUniqueLabels: false,
  rows: [],
  meta: [],
  series: [],
};

describe('dataFrameToLogsModel', () => {
  it('given empty series should return empty logs model', () => {
    expect(dataFrameToLogsModel([] as DataFrame[], 0)).toMatchObject(emptyLogsModel);
  });

  it('given series without correct series name should return empty logs model', () => {
    const series: DataFrame[] = [
      toDataFrame({
        fields: [],
      }),
    ];
    expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
  });

  it('given series without a time field should return empty logs model', () => {
    const series: DataFrame[] = [
      new DataFrameHelper({
        fields: [
          {
            name: 'message',
            type: FieldType.string,
            values: [],
          },
        ],
      }),
    ];
    expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
  });

  it('given series without a string field should return empty logs model', () => {
    const series: DataFrame[] = [
      new DataFrameHelper({
        fields: [
          {
            name: 'time',
            type: FieldType.time,
            values: [],
          },
        ],
      }),
    ];
    expect(dataFrameToLogsModel(series, 0)).toMatchObject(emptyLogsModel);
  });

  it('given one series should return expected logs model', () => {
    const series: DataFrame[] = [
      new DataFrameHelper({
        labels: {
          filename: '/var/log/grafana/grafana.log',
          job: 'grafana',
        },
        fields: [
          {
            name: 'time',
            type: FieldType.time,
            values: ['2019-04-26T09:28:11.352440161Z', '2019-04-26T14:42:50.991981292Z'],
          },
          {
            name: 'message',
            type: FieldType.string,
            values: [
              't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
              't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
            ],
          },
        ],
        meta: {
          limit: 1000,
        },
      }),
    ];
    const logsModel = dataFrameToLogsModel(series, 0);
    expect(logsModel.hasUniqueLabels).toBeFalsy();
    expect(logsModel.rows).toHaveLength(2);
    expect(logsModel.rows).toMatchObject([
      {
        timestamp: '2019-04-26T09:28:11.352440161Z',
        entry: 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
        labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
        logLevel: 'info',
        uniqueLabels: {},
      },
      {
        timestamp: '2019-04-26T14:42:50.991981292Z',
        entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
        labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
        logLevel: 'error',
        uniqueLabels: {},
      },
    ]);
    expect(logsModel.series).toHaveLength(2);
    expect(logsModel.meta).toHaveLength(2);
    expect(logsModel.meta[0]).toMatchObject({
      label: 'Common labels',
      value: series[0].labels,
      kind: LogsMetaKind.LabelsMap,
    });
    expect(logsModel.meta[1]).toMatchObject({
      label: 'Limit',
      value: `1000 (2 returned)`,
      kind: LogsMetaKind.String,
    });
  });

  it('given one series without labels should return expected logs model', () => {
    const series: DataFrame[] = [
      new DataFrameHelper({
        fields: [
          {
            name: 'time',
            type: FieldType.time,
            values: ['1970-01-01T00:00:01Z'],
          },
          {
            name: 'message',
            type: FieldType.string,
            values: ['WARN boooo'],
          },
          {
            name: 'level',
            type: FieldType.string,
            values: ['dbug'],
          },
        ],
      }),
    ];
    const logsModel = dataFrameToLogsModel(series, 0);
    expect(logsModel.rows).toHaveLength(1);
    expect(logsModel.rows).toMatchObject([
      {
        entry: 'WARN boooo',
        labels: undefined,
        logLevel: LogLevel.debug,
        uniqueLabels: {},
      },
    ]);
  });

  it('given multiple series should return expected logs model', () => {
    const series: DataFrame[] = [
      toDataFrame({
        labels: {
          foo: 'bar',
          baz: '1',
          level: 'dbug',
        },
        fields: [
          {
            name: 'ts',
            type: FieldType.time,
            values: ['1970-01-01T00:00:01Z'],
          },
          {
            name: 'line',
            type: FieldType.string,
            values: ['WARN boooo'],
          },
        ],
      }),
      toDataFrame({
        name: 'logs',
        labels: {
          foo: 'bar',
          baz: '2',
          level: 'err',
        },
        fields: [
          {
            name: 'time',
            type: FieldType.time,
            values: ['1970-01-01T00:00:00Z', '1970-01-01T00:00:02Z'],
          },
          {
            name: 'message',
            type: FieldType.string,
            values: ['INFO 1', 'INFO 2'],
          },
        ],
      }),
    ];
    const logsModel = dataFrameToLogsModel(series, 0);
    expect(logsModel.hasUniqueLabels).toBeTruthy();
    expect(logsModel.rows).toHaveLength(3);
    expect(logsModel.rows).toMatchObject([
      {
        entry: 'WARN boooo',
        labels: { foo: 'bar', baz: '1' },
        logLevel: LogLevel.debug,
        uniqueLabels: { baz: '1' },
      },
      {
        entry: 'INFO 1',
        labels: { foo: 'bar', baz: '2' },
        logLevel: LogLevel.error,
        uniqueLabels: { baz: '2' },
      },
      {
        entry: 'INFO 2',
        labels: { foo: 'bar', baz: '2' },
        logLevel: LogLevel.error,
        uniqueLabels: { baz: '2' },
      },
    ]);
    expect(logsModel.series).toHaveLength(2);
    expect(logsModel.meta).toHaveLength(1);
    expect(logsModel.meta[0]).toMatchObject({
      label: 'Common labels',
      value: {
        foo: 'bar',
      },
      kind: LogsMetaKind.LabelsMap,
    });
  });
});