// result_transformer.test.ts

import { LogsStream } from 'app/core/logs_model';
import { mergeStreamsToLogs, logStreamToSeriesData, seriesDataToLogStream } from './result_transformer';
  3. describe('mergeStreamsToLogs()', () => {
  4. it('returns empty logs given no streams', () => {
  5. expect(mergeStreamsToLogs([]).rows).toEqual([]);
  6. });
  7. it('returns processed logs from single stream', () => {
  8. const stream1: LogsStream = {
  9. labels: '{foo="bar"}',
  10. entries: [
  11. {
  12. line: 'WARN boooo',
  13. ts: '1970-01-01T00:00:00Z',
  14. },
  15. ],
  16. };
  17. expect(mergeStreamsToLogs([stream1]).rows).toMatchObject([
  18. {
  19. entry: 'WARN boooo',
  20. labels: { foo: 'bar' },
  21. key: 'EK1970-01-01T00:00:00Z{foo="bar"}',
  22. logLevel: 'warning',
  23. uniqueLabels: {},
  24. },
  25. ]);
  26. });
  27. it('returns merged logs from multiple streams sorted by time and with unique labels', () => {
  28. const stream1: LogsStream = {
  29. labels: '{foo="bar", baz="1"}',
  30. entries: [
  31. {
  32. line: 'WARN boooo',
  33. ts: '1970-01-01T00:00:01Z',
  34. },
  35. ],
  36. };
  37. const stream2: LogsStream = {
  38. labels: '{foo="bar", baz="2"}',
  39. entries: [
  40. {
  41. line: 'INFO 1',
  42. ts: '1970-01-01T00:00:00Z',
  43. },
  44. {
  45. line: 'INFO 2',
  46. ts: '1970-01-01T00:00:02Z',
  47. },
  48. ],
  49. };
  50. expect(mergeStreamsToLogs([stream1, stream2]).rows).toMatchObject([
  51. {
  52. entry: 'INFO 2',
  53. labels: { foo: 'bar', baz: '2' },
  54. logLevel: 'info',
  55. uniqueLabels: { baz: '2' },
  56. },
  57. {
  58. entry: 'WARN boooo',
  59. labels: { foo: 'bar', baz: '1' },
  60. logLevel: 'warning',
  61. uniqueLabels: { baz: '1' },
  62. },
  63. {
  64. entry: 'INFO 1',
  65. labels: { foo: 'bar', baz: '2' },
  66. logLevel: 'info',
  67. uniqueLabels: { baz: '2' },
  68. },
  69. ]);
  70. });
  71. it('detects ANSI codes', () => {
  72. expect(
  73. mergeStreamsToLogs([
  74. {
  75. labels: '{foo="bar"}',
  76. entries: [
  77. {
  78. line: "foo: 'bar'",
  79. ts: '1970-01-01T00:00:00Z',
  80. },
  81. ],
  82. },
  83. {
  84. labels: '{bar="foo"}',
  85. entries: [
  86. {
  87. line: "bar: 'foo'",
  88. ts: '1970-01-01T00:00:00Z',
  89. },
  90. ],
  91. },
  92. ]).rows
  93. ).toMatchObject([
  94. {
  95. entry: "bar: 'foo'",
  96. hasAnsi: false,
  97. key: 'EK1970-01-01T00:00:00Z{bar="foo"}',
  98. labels: { bar: 'foo' },
  99. logLevel: 'unknown',
  100. raw: "bar: 'foo'",
  101. },
  102. {
  103. entry: "foo: 'bar'",
  104. hasAnsi: true,
  105. key: 'EK1970-01-01T00:00:00Z{foo="bar"}',
  106. labels: { foo: 'bar' },
  107. logLevel: 'unknown',
  108. raw: "foo: 'bar'",
  109. },
  110. ]);
  111. });
  112. });
  113. describe('convert SeriesData to/from LogStream', () => {
  114. const streams = [
  115. {
  116. labels: '{foo="bar"}',
  117. entries: [
  118. {
  119. line: "foo: 'bar'",
  120. ts: '1970-01-01T00:00:00Z',
  121. },
  122. ],
  123. },
  124. {
  125. labels: '{bar="foo"}',
  126. entries: [
  127. {
  128. line: "bar: 'foo'",
  129. ts: '1970-01-01T00:00:00Z',
  130. },
  131. ],
  132. },
  133. ];
  134. it('converts streams to series', () => {
  135. const data = streams.map(stream => logStreamToSeriesData(stream));
  136. expect(data.length).toBe(2);
  137. expect(data[0].labels['foo']).toEqual('bar');
  138. expect(data[0].rows[0][0]).toEqual(streams[0].entries[0].ts);
  139. const roundtrip = data.map(series => seriesDataToLogStream(series));
  140. expect(roundtrip.length).toBe(2);
  141. expect(roundtrip[0].labels).toEqual(streams[0].labels);
  142. });
  143. });