Explore: Use SeriesData format for loki/logs (#16793)

This is the first step towards Explore supporting logs for
more datasources than Loki: all log processing is moved from the Loki
datasource into Explore.
- Make Explore convert log results returned from datasources to SeriesData
if needed, and for now convert SeriesData to LogsModel.
- The Loki datasource query now returns SeriesData; all
processing has been moved into Explore instead.
- Removed key from LogRowModel; log row indexes are used as
the unique key instead.
- Removed id from LogsModel since it does not appear to be in use.
- Introduced a new FieldCache class that is convenient when
looking up multiple fields and/or field types and series values
(see the sketch below).
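A rough usage sketch of the two new entry points (variable and field names below are illustrative, not part of this commit):

import { FieldCache, FieldType, toSeriesData, guessFieldTypes } from '@grafana/ui';
import { seriesDataToLogsModel } from 'app/core/logs_model';

// FieldCache wraps a SeriesData fields array and resolves fields by type, name or index.
const fieldCache = new FieldCache([
  { name: 'time', type: FieldType.time },
  { name: 'message', type: FieldType.string },
]);
const timeField = fieldCache.getFirstFieldOfType(FieldType.time); // IndexedField with index 0
const lineField = fieldCache.getFieldByName('message'); // IndexedField with index 1
// The returned index lets callers read row values, e.g. row[timeField.index].

// Explore now normalizes whatever the datasource returned and builds the logs model itself
// (logResults and intervalMs are placeholders):
const seriesData = logResults.map(r => guessFieldTypes(toSeriesData(r)));
const logsModel = seriesDataToLogsModel(seriesData, intervalMs);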

Closes #16287
Marcus Efraimsson 6 years ago
parent commit fe20dde5db

+ 6 - 0
packages/grafana-ui/src/types/data.ts

@@ -19,6 +19,12 @@ export interface QueryResultMeta {
 
   // Match the result to the query
   requestId?: string;
+
+  // Used in Explore for highlighting
+  search?: string;
+
+  // Used in Explore to show limit applied to search result
+  limit?: number;
 }
 
 export interface QueryResultBase {

+ 71 - 0
packages/grafana-ui/src/utils/fieldCache.test.ts

@@ -0,0 +1,71 @@
+import { FieldType } from '../types/index';
+import { FieldCache } from './fieldCache';
+
+describe('FieldCache', () => {
+  it('when creating a new FieldCache from fields should be able to query cache', () => {
+    const fields = [
+      { name: 'time', type: FieldType.time },
+      { name: 'string', type: FieldType.string },
+      { name: 'number', type: FieldType.number },
+      { name: 'boolean', type: FieldType.boolean },
+      { name: 'other', type: FieldType.other },
+      { name: 'undefined' },
+    ];
+    const fieldCache = new FieldCache(fields);
+    const allFields = fieldCache.getFields();
+    expect(allFields).toHaveLength(6);
+
+    const expectedFields = [
+      { ...fields[0], index: 0 },
+      { ...fields[1], index: 1 },
+      { ...fields[2], index: 2 },
+      { ...fields[3], index: 3 },
+      { ...fields[4], index: 4 },
+      { ...fields[5], type: FieldType.other, index: 5 },
+    ];
+
+    expect(allFields).toMatchObject(expectedFields);
+
+    expect(fieldCache.hasFieldOfType(FieldType.time)).toBeTruthy();
+    expect(fieldCache.hasFieldOfType(FieldType.string)).toBeTruthy();
+    expect(fieldCache.hasFieldOfType(FieldType.number)).toBeTruthy();
+    expect(fieldCache.hasFieldOfType(FieldType.boolean)).toBeTruthy();
+    expect(fieldCache.hasFieldOfType(FieldType.other)).toBeTruthy();
+
+    expect(fieldCache.getFields(FieldType.time)).toMatchObject([expectedFields[0]]);
+    expect(fieldCache.getFields(FieldType.string)).toMatchObject([expectedFields[1]]);
+    expect(fieldCache.getFields(FieldType.number)).toMatchObject([expectedFields[2]]);
+    expect(fieldCache.getFields(FieldType.boolean)).toMatchObject([expectedFields[3]]);
+    expect(fieldCache.getFields(FieldType.other)).toMatchObject([expectedFields[4], expectedFields[5]]);
+
+    expect(fieldCache.getFieldByIndex(0)).toMatchObject(expectedFields[0]);
+    expect(fieldCache.getFieldByIndex(1)).toMatchObject(expectedFields[1]);
+    expect(fieldCache.getFieldByIndex(2)).toMatchObject(expectedFields[2]);
+    expect(fieldCache.getFieldByIndex(3)).toMatchObject(expectedFields[3]);
+    expect(fieldCache.getFieldByIndex(4)).toMatchObject(expectedFields[4]);
+    expect(fieldCache.getFieldByIndex(5)).toMatchObject(expectedFields[5]);
+    expect(fieldCache.getFieldByIndex(6)).toBeNull();
+
+    expect(fieldCache.getFirstFieldOfType(FieldType.time)).toMatchObject(expectedFields[0]);
+    expect(fieldCache.getFirstFieldOfType(FieldType.string)).toMatchObject(expectedFields[1]);
+    expect(fieldCache.getFirstFieldOfType(FieldType.number)).toMatchObject(expectedFields[2]);
+    expect(fieldCache.getFirstFieldOfType(FieldType.boolean)).toMatchObject(expectedFields[3]);
+    expect(fieldCache.getFirstFieldOfType(FieldType.other)).toMatchObject(expectedFields[4]);
+
+    expect(fieldCache.hasFieldNamed('tim')).toBeFalsy();
+    expect(fieldCache.hasFieldNamed('time')).toBeTruthy();
+    expect(fieldCache.hasFieldNamed('string')).toBeTruthy();
+    expect(fieldCache.hasFieldNamed('number')).toBeTruthy();
+    expect(fieldCache.hasFieldNamed('boolean')).toBeTruthy();
+    expect(fieldCache.hasFieldNamed('other')).toBeTruthy();
+    expect(fieldCache.hasFieldNamed('undefined')).toBeTruthy();
+
+    expect(fieldCache.getFieldByName('time')).toMatchObject(expectedFields[0]);
+    expect(fieldCache.getFieldByName('string')).toMatchObject(expectedFields[1]);
+    expect(fieldCache.getFieldByName('number')).toMatchObject(expectedFields[2]);
+    expect(fieldCache.getFieldByName('boolean')).toMatchObject(expectedFields[3]);
+    expect(fieldCache.getFieldByName('other')).toMatchObject(expectedFields[4]);
+    expect(fieldCache.getFieldByName('undefined')).toMatchObject(expectedFields[5]);
+    expect(fieldCache.getFieldByName('null')).toBeNull();
+  });
+});

+ 76 - 0
packages/grafana-ui/src/utils/fieldCache.ts

@@ -0,0 +1,76 @@
+import { Field, FieldType } from '../types/index';
+
+export interface IndexedField extends Field {
+  index: number;
+}
+
+export class FieldCache {
+  private fields: Field[];
+  private fieldIndexByName: { [key: string]: number };
+  private fieldIndexByType: { [key: string]: number[] };
+
+  constructor(fields?: Field[]) {
+    this.fields = [];
+    this.fieldIndexByName = {};
+    this.fieldIndexByType = {};
+    this.fieldIndexByType[FieldType.time] = [];
+    this.fieldIndexByType[FieldType.string] = [];
+    this.fieldIndexByType[FieldType.number] = [];
+    this.fieldIndexByType[FieldType.boolean] = [];
+    this.fieldIndexByType[FieldType.other] = [];
+
+    if (fields) {
+      for (let n = 0; n < fields.length; n++) {
+        const field = fields[n];
+        this.addField(field);
+      }
+    }
+  }
+
+  addField(field: Field) {
+    this.fields.push({
+      type: FieldType.other,
+      ...field,
+    });
+    const index = this.fields.length - 1;
+    this.fieldIndexByName[field.name] = index;
+    this.fieldIndexByType[field.type || FieldType.other].push(index);
+  }
+
+  hasFieldOfType(type: FieldType): boolean {
+    return this.fieldIndexByType[type] && this.fieldIndexByType[type].length > 0;
+  }
+
+  getFields(type?: FieldType): IndexedField[] {
+    const fields: IndexedField[] = [];
+    for (let index = 0; index < this.fields.length; index++) {
+      const field = this.fields[index];
+
+      if (!type || field.type === type) {
+        fields.push({ ...field, index });
+      }
+    }
+
+    return fields;
+  }
+
+  getFieldByIndex(index: number): IndexedField | null {
+    return this.fields[index] ? { ...this.fields[index], index } : null;
+  }
+
+  getFirstFieldOfType(type: FieldType): IndexedField | null {
+    return this.hasFieldOfType(type)
+      ? { ...this.fields[this.fieldIndexByType[type][0]], index: this.fieldIndexByType[type][0] }
+      : null;
+  }
+
+  hasFieldNamed(name: string): boolean {
+    return this.fieldIndexByName[name] !== undefined;
+  }
+
+  getFieldByName(name: string): IndexedField | null {
+    return this.hasFieldNamed(name)
+      ? { ...this.fields[this.fieldIndexByName[name]], index: this.fieldIndexByName[name] }
+      : null;
+  }
+}

+ 1 - 0
packages/grafana-ui/src/utils/index.ts

@@ -14,3 +14,4 @@ export { getMappedValue } from './valueMappings';
 export * from './validate';
 export { getFlotPairs } from './flotPairs';
 export * from './object';
+export * from './fieldCache';

+ 0 - 10
packages/grafana-ui/src/utils/processSeriesData.ts

@@ -43,16 +43,6 @@ function convertTimeSeriesToSeriesData(timeSeries: TimeSeries): SeriesData {
   };
 }
 
-export const getFirstTimeField = (series: SeriesData): number => {
-  const { fields } = series;
-  for (let i = 0; i < fields.length; i++) {
-    if (fields[i].type === FieldType.time) {
-      return i;
-    }
-  }
-  return -1;
-};
-
 // PapaParse Dynamic Typing regex:
 // https://github.com/mholt/PapaParse/blob/master/papaparse.js#L998
 const NUMBER = /^\s*-?(\d*\.?\d+|\d+\.?\d*)(e[-+]?\d+)?\s*$/i;

+ 150 - 19
public/app/core/logs_model.ts

@@ -1,7 +1,22 @@
 import _ from 'lodash';
-
-import { colors, TimeSeries, Labels, LogLevel } from '@grafana/ui';
+import moment from 'moment';
+import ansicolor from 'vendor/ansicolor/ansicolor';
+
+import {
+  colors,
+  TimeSeries,
+  Labels,
+  LogLevel,
+  SeriesData,
+  findCommonLabels,
+  findUniqueLabels,
+  getLogLevel,
+  toLegacyResponseData,
+  FieldCache,
+  FieldType,
+} from '@grafana/ui';
 import { getThemeColor } from 'app/core/utils/colors';
+import { hasAnsiCodes } from 'app/core/utils/text';
 
 export const LogLevelColor = {
   [LogLevel.critical]: colors[7],
@@ -23,7 +38,6 @@ export interface LogRowModel {
   duplicates?: number;
   entry: string;
   hasAnsi: boolean;
-  key: string; // timestamp + labels
   labels: Labels;
   logLevel: LogLevel;
   raw: string;
@@ -56,27 +70,11 @@ export interface LogsMetaItem {
 
 export interface LogsModel {
   hasUniqueLabels: boolean;
-  id: string; // Identify one logs result from another
   meta?: LogsMetaItem[];
   rows: LogRowModel[];
   series?: TimeSeries[];
 }
 
-export interface LogsStream {
-  labels: string;
-  entries: LogsStreamEntry[];
-  search?: string;
-  parsedLabels?: Labels;
-  uniqueLabels?: Labels;
-}
-
-export interface LogsStreamEntry {
-  line: string;
-  ts: string;
-  // Legacy, was renamed to ts
-  timestamp?: string;
-}
-
 export enum LogsDedupDescription {
   none = 'No de-duplication',
   exact = 'De-duplication of successive lines that are identical, ignoring ISO datetimes.',
@@ -326,3 +324,136 @@ export function makeSeriesForLogs(rows: LogRowModel[], intervalMs: number): Time
     };
   });
 }
+
+function isLogsData(series: SeriesData) {
+  return series.fields.some(f => f.type === FieldType.time) && series.fields.some(f => f.type === FieldType.string);
+}
+
+export function seriesDataToLogsModel(seriesData: SeriesData[], intervalMs: number): LogsModel {
+  const metricSeries: SeriesData[] = [];
+  const logSeries: SeriesData[] = [];
+
+  for (const series of seriesData) {
+    if (isLogsData(series)) {
+      logSeries.push(series);
+      continue;
+    }
+
+    metricSeries.push(series);
+  }
+
+  const logsModel = logSeriesToLogsModel(logSeries);
+  if (logsModel) {
+    if (metricSeries.length === 0) {
+      logsModel.series = makeSeriesForLogs(logsModel.rows, intervalMs);
+    } else {
+      logsModel.series = [];
+      for (const series of metricSeries) {
+        logsModel.series.push(toLegacyResponseData(series) as TimeSeries);
+      }
+    }
+
+    return logsModel;
+  }
+
+  return undefined;
+}
+
+export function logSeriesToLogsModel(logSeries: SeriesData[]): LogsModel {
+  if (logSeries.length === 0) {
+    return undefined;
+  }
+
+  const allLabels: Labels[] = [];
+  for (let n = 0; n < logSeries.length; n++) {
+    const series = logSeries[n];
+    if (series.labels) {
+      allLabels.push(series.labels);
+    }
+  }
+
+  let commonLabels: Labels = {};
+  if (allLabels.length > 0) {
+    commonLabels = findCommonLabels(allLabels);
+  }
+
+  const rows: LogRowModel[] = [];
+  let hasUniqueLabels = false;
+
+  for (let i = 0; i < logSeries.length; i++) {
+    const series = logSeries[i];
+    const fieldCache = new FieldCache(series.fields);
+    const uniqueLabels = findUniqueLabels(series.labels, commonLabels);
+    if (Object.keys(uniqueLabels).length > 0) {
+      hasUniqueLabels = true;
+    }
+
+    for (let j = 0; j < series.rows.length; j++) {
+      rows.push(processLogSeriesRow(series, fieldCache, j, uniqueLabels));
+    }
+  }
+
+  const sortedRows = rows.sort((a, b) => {
+    return a.timestamp > b.timestamp ? -1 : 1;
+  });
+
+  // Meta data to display in status
+  const meta: LogsMetaItem[] = [];
+  if (_.size(commonLabels) > 0) {
+    meta.push({
+      label: 'Common labels',
+      value: commonLabels,
+      kind: LogsMetaKind.LabelsMap,
+    });
+  }
+
+  const limits = logSeries.filter(series => series.meta && series.meta.limit);
+
+  if (limits.length > 0) {
+    meta.push({
+      label: 'Limit',
+      value: `${limits[0].meta.limit} (${sortedRows.length} returned)`,
+      kind: LogsMetaKind.String,
+    });
+  }
+
+  return {
+    hasUniqueLabels,
+    meta,
+    rows: sortedRows,
+  };
+}
+
+export function processLogSeriesRow(
+  series: SeriesData,
+  fieldCache: FieldCache,
+  rowIndex: number,
+  uniqueLabels: Labels
+): LogRowModel {
+  const row = series.rows[rowIndex];
+  const timeFieldIndex = fieldCache.getFirstFieldOfType(FieldType.time).index;
+  const ts = row[timeFieldIndex];
+  const stringFieldIndex = fieldCache.getFirstFieldOfType(FieldType.string).index;
+  const message = row[stringFieldIndex];
+  const time = moment(ts);
+  const timeEpochMs = time.valueOf();
+  const timeFromNow = time.fromNow();
+  const timeLocal = time.format('YYYY-MM-DD HH:mm:ss');
+  const logLevel = getLogLevel(message);
+  const hasAnsi = hasAnsiCodes(message);
+  const search = series.meta && series.meta.search ? series.meta.search : '';
+
+  return {
+    logLevel,
+    timeFromNow,
+    timeEpochMs,
+    timeLocal,
+    uniqueLabels,
+    hasAnsi,
+    entry: hasAnsi ? ansicolor.strip(message) : message,
+    raw: message,
+    labels: series.labels,
+    searchWords: search ? [search] : [],
+    timestamp: ts,
+  };
+}

+ 216 - 0
public/app/core/specs/logs_model.test.ts

@@ -6,7 +6,10 @@ import {
   LogsDedupStrategy,
   LogsModel,
   LogsParsers,
+  seriesDataToLogsModel,
+  LogsMetaKind,
 } from '../logs_model';
+import { SeriesData, FieldType } from '@grafana/ui';
 
 describe('dedupLogRows()', () => {
   test('should return rows as is when dedup is set to none', () => {
@@ -329,3 +332,216 @@ describe('LogsParsers', () => {
     });
   });
 });
+
+describe('seriesDataToLogsModel', () => {
+  it('given empty series should return undefined', () => {
+    expect(seriesDataToLogsModel([] as SeriesData[], 0)).toBeUndefined();
+  });
+
+  it('given series without correct series name should not be processed', () => {
+    const series: SeriesData[] = [
+      {
+        fields: [],
+        rows: [],
+      },
+    ];
+    expect(seriesDataToLogsModel(series, 0)).toBeUndefined();
+  });
+
+  it('given series without a time field should not be processed', () => {
+    const series: SeriesData[] = [
+      {
+        fields: [
+          {
+            name: 'message',
+            type: FieldType.string,
+          },
+        ],
+        rows: [],
+      },
+    ];
+    expect(seriesDataToLogsModel(series, 0)).toBeUndefined();
+  });
+
+  it('given series without a string field should not be processed', () => {
+    const series: SeriesData[] = [
+      {
+        fields: [
+          {
+            name: 'time',
+            type: FieldType.time,
+          },
+        ],
+        rows: [],
+      },
+    ];
+    expect(seriesDataToLogsModel(series, 0)).toBeUndefined();
+  });
+
+  it('given one series should return expected logs model', () => {
+    const series: SeriesData[] = [
+      {
+        labels: {
+          filename: '/var/log/grafana/grafana.log',
+          job: 'grafana',
+        },
+        fields: [
+          {
+            name: 'time',
+            type: FieldType.time,
+          },
+          {
+            name: 'message',
+            type: FieldType.string,
+          },
+        ],
+        rows: [
+          [
+            '2019-04-26T09:28:11.352440161Z',
+            't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
+          ],
+          [
+            '2019-04-26T14:42:50.991981292Z',
+            't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
+          ],
+        ],
+        meta: {
+          limit: 1000,
+        },
+      },
+    ];
+    const logsModel = seriesDataToLogsModel(series, 0);
+    expect(logsModel.hasUniqueLabels).toBeFalsy();
+    expect(logsModel.rows).toHaveLength(2);
+    expect(logsModel.rows).toMatchObject([
+      {
+        timestamp: '2019-04-26T14:42:50.991981292Z',
+        entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
+        labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
+        logLevel: 'error',
+        uniqueLabels: {},
+      },
+      {
+        timestamp: '2019-04-26T09:28:11.352440161Z',
+        entry: 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
+        labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
+        logLevel: 'info',
+        uniqueLabels: {},
+      },
+    ]);
+
+    expect(logsModel.series).toHaveLength(2);
+    expect(logsModel.meta).toHaveLength(2);
+    expect(logsModel.meta[0]).toMatchObject({
+      label: 'Common labels',
+      value: series[0].labels,
+      kind: LogsMetaKind.LabelsMap,
+    });
+    expect(logsModel.meta[1]).toMatchObject({
+      label: 'Limit',
+      value: `1000 (2 returned)`,
+      kind: LogsMetaKind.String,
+    });
+  });
+
+  it('given one series without labels should return expected logs model', () => {
+    const series: SeriesData[] = [
+      {
+        fields: [
+          {
+            name: 'time',
+            type: FieldType.time,
+          },
+          {
+            name: 'message',
+            type: FieldType.string,
+          },
+        ],
+        rows: [['1970-01-01T00:00:01Z', 'WARN boooo']],
+      },
+    ];
+    const logsModel = seriesDataToLogsModel(series, 0);
+    expect(logsModel.rows).toHaveLength(1);
+    expect(logsModel.rows).toMatchObject([
+      {
+        entry: 'WARN boooo',
+        labels: undefined,
+        logLevel: 'warning',
+        uniqueLabels: {},
+      },
+    ]);
+  });
+
+  it('given multiple series should return expected logs model', () => {
+    const series: SeriesData[] = [
+      {
+        labels: {
+          foo: 'bar',
+          baz: '1',
+        },
+        fields: [
+          {
+            name: 'ts',
+            type: FieldType.time,
+          },
+          {
+            name: 'line',
+            type: FieldType.string,
+          },
+        ],
+        rows: [['1970-01-01T00:00:01Z', 'WARN boooo']],
+      },
+      {
+        name: 'logs',
+        labels: {
+          foo: 'bar',
+          baz: '2',
+        },
+        fields: [
+          {
+            name: 'time',
+            type: FieldType.time,
+          },
+          {
+            name: 'message',
+            type: FieldType.string,
+          },
+        ],
+        rows: [['1970-01-01T00:00:00Z', 'INFO 1'], ['1970-01-01T00:00:02Z', 'INFO 2']],
+      },
+    ];
+    const logsModel = seriesDataToLogsModel(series, 0);
+    expect(logsModel.hasUniqueLabels).toBeTruthy();
+    expect(logsModel.rows).toHaveLength(3);
+    expect(logsModel.rows).toMatchObject([
+      {
+        entry: 'INFO 2',
+        labels: { foo: 'bar', baz: '2' },
+        logLevel: 'info',
+        uniqueLabels: { baz: '2' },
+      },
+      {
+        entry: 'WARN boooo',
+        labels: { foo: 'bar', baz: '1' },
+        logLevel: 'warning',
+        uniqueLabels: { baz: '1' },
+      },
+      {
+        entry: 'INFO 1',
+        labels: { foo: 'bar', baz: '2' },
+        logLevel: 'info',
+        uniqueLabels: { baz: '2' },
+      },
+    ]);
+
+    expect(logsModel.series).toHaveLength(2);
+    expect(logsModel.meta).toHaveLength(1);
+    expect(logsModel.meta[0]).toMatchObject({
+      label: 'Common labels',
+      value: {
+        foo: 'bar',
+      },
+      kind: LogsMetaKind.LabelsMap,
+    });
+  });
+});

+ 18 - 11
public/app/core/utils/explore.ts

@@ -11,7 +11,17 @@ import TableModel, { mergeTablesIntoModel } from 'app/core/table_model';
 import { getNextRefIdChar } from './query';
 
 // Types
-import { colors, TimeRange, RawTimeRange, TimeZone, IntervalValues, DataQuery, DataSourceApi } from '@grafana/ui';
+import {
+  colors,
+  TimeRange,
+  RawTimeRange,
+  TimeZone,
+  IntervalValues,
+  DataQuery,
+  DataSourceApi,
+  toSeriesData,
+  guessFieldTypes,
+} from '@grafana/ui';
 import TimeSeries from 'app/core/time_series2';
 import {
   ExploreUrlState,
@@ -22,7 +32,7 @@ import {
   QueryOptions,
   ResultGetter,
 } from 'app/types/explore';
-import { LogsDedupStrategy } from 'app/core/logs_model';
+import { LogsDedupStrategy, seriesDataToLogsModel } from 'app/core/logs_model';
 
 export const DEFAULT_RANGE = {
   from: 'now-6h',
@@ -293,15 +303,12 @@ export function calculateResultsFromQueryTransactions(
       .filter(qt => qt.resultType === 'Table' && qt.done && qt.result && qt.result.columns && qt.result.rows)
       .map(qt => qt.result)
   );
-  const logsResult =
-    datasource && datasource.mergeStreams
-      ? datasource.mergeStreams(
-          _.flatten(
-            queryTransactions.filter(qt => qt.resultType === 'Logs' && qt.done && qt.result).map(qt => qt.result)
-          ),
-          graphInterval
-        )
-      : undefined;
+  const logsResult = seriesDataToLogsModel(
+    _.flatten(
+      queryTransactions.filter(qt => qt.resultType === 'Logs' && qt.done && qt.result).map(qt => qt.result)
+    ).map(r => guessFieldTypes(toSeriesData(r))),
+    graphInterval
+  );
 
   return {
     graphResult,

+ 4 - 4
public/app/features/explore/Logs.tsx

@@ -248,9 +248,9 @@ export default class Logs extends PureComponent<Props, State> {
         <div className="logs-rows">
         <div className="logs-rows">
           {hasData &&
           {hasData &&
           !deferLogs && // Only inject highlighterExpression in the first set for performance reasons
           !deferLogs && // Only inject highlighterExpression in the first set for performance reasons
-            firstRows.map(row => (
+            firstRows.map((row, index) => (
               <LogRow
-                key={row.key + row.duplicates}
+                key={index}
                 getRows={getRows}
                 highlighterExpressions={highlighterExpressions}
                 row={row}
@@ -264,9 +264,9 @@ export default class Logs extends PureComponent<Props, State> {
           {hasData &&
             !deferLogs &&
             renderAll &&
-            lastRows.map(row => (
+            lastRows.map((row, index) => (
               <LogRow
-                key={row.key + row.duplicates}
+                key={PREVIEW_LIMIT + index}
                 getRows={getRows}
                 row={row}
                 showDuplicates={showDuplicates}

+ 0 - 1
public/app/features/explore/LogsContainer.tsx

@@ -81,7 +81,6 @@ export class LogsContainer extends PureComponent<LogsContainerProps> {
           data={logsResult}
           dedupedData={dedupedResult}
           exploreId={exploreId}
-          key={logsResult && logsResult.id}
           highlighterExpressions={logsHighlighterExpressions}
           loading={loading}
           onChangeTime={onChangeTime}

+ 9 - 17
public/app/plugins/datasource/loki/datasource.test.ts

@@ -1,6 +1,7 @@
 import LokiDatasource from './datasource';
 import { LokiQuery } from './types';
 import { getQueryOptions } from 'test/helpers/getQueryOptions';
+import { SeriesData } from '@grafana/ui';
 
 describe('LokiDatasource', () => {
   const instanceSettings: any = {
@@ -50,8 +51,10 @@ describe('LokiDatasource', () => {
       expect(backendSrvMock.datasourceRequest.mock.calls[0][0].url).toContain('limit=20');
     });
 
-    test('should return log streams when resultFormat is undefined', async done => {
-      const ds = new LokiDatasource(instanceSettings, backendSrvMock, templateSrvMock);
+    test('should return series data', async done => {
+      const customData = { ...(instanceSettings.jsonData || {}), maxLines: 20 };
+      const customSettings = { ...instanceSettings, jsonData: customData };
+      const ds = new LokiDatasource(customSettings, backendSrvMock, templateSrvMock);
       backendSrvMock.datasourceRequest = jest.fn(() => Promise.resolve(testResp));
 
       const options = getQueryOptions<LokiQuery>({
@@ -60,21 +63,10 @@
 
       const res = await ds.query(options);
 
-      expect(res.data[0].entries[0].line).toBe('hello');
-      done();
-    });
-
-    test('should return time series when resultFormat is time_series', async done => {
-      const ds = new LokiDatasource(instanceSettings, backendSrvMock, templateSrvMock);
-      backendSrvMock.datasourceRequest = jest.fn(() => Promise.resolve(testResp));
-
-      const options = getQueryOptions<LokiQuery>({
-        targets: [{ expr: 'foo', refId: 'B', resultFormat: 'time_series' }],
-      });
-
-      const res = await ds.query(options);
-
-      expect(res.data[0].datapoints).toBeDefined();
+      const seriesData = res.data[0] as SeriesData;
+      expect(seriesData.rows[0][1]).toBe('hello');
+      expect(seriesData.meta.limit).toBe(20);
+      expect(seriesData.meta.search).toBe('(?i)foo');
       done();
     });
   });

+ 10 - 24
public/app/plugins/datasource/loki/datasource.ts

@@ -5,13 +5,11 @@ import _ from 'lodash';
 import * as dateMath from 'app/core/utils/datemath';
 import { addLabelToSelector } from 'app/plugins/datasource/prometheus/add_label_to_query';
 import LanguageProvider from './language_provider';
-import { mergeStreamsToLogs } from './result_transformer';
+import { logStreamToSeriesData } from './result_transformer';
 import { formatQuery, parseQuery } from './query_utils';
-import { makeSeriesForLogs } from 'app/core/logs_model';
 
 // Types
-import { LogsStream, LogsModel } from 'app/core/logs_model';
-import { PluginMeta, DataQueryRequest } from '@grafana/ui/src/types';
+import { PluginMeta, DataQueryRequest, SeriesData } from '@grafana/ui/src/types';
 import { LokiQuery } from './types';
 
 export const DEFAULT_MAX_LINES = 1000;
@@ -54,12 +52,6 @@ export class LokiDatasource {
     return this.backendSrv.datasourceRequest(req);
   }
 
-  mergeStreams(streams: LogsStream[], intervalMs: number): LogsModel {
-    const logs = mergeStreamsToLogs(streams, this.maxLines);
-    logs.series = makeSeriesForLogs(logs.rows, intervalMs);
-    return logs;
-  }
-
   prepareQueryTarget(target, options) {
     const interpolated = this.templateSrv.replace(target.expr);
     const start = this.getTime(options.range.from, false);
@@ -85,29 +77,23 @@ export class LokiDatasource {
     const queries = queryTargets.map(target => this._request('/api/prom/query', target));
 
     return Promise.all(queries).then((results: any[]) => {
-      const allStreams: LogsStream[] = [];
+      const series: SeriesData[] = [];
 
       for (let i = 0; i < results.length; i++) {
         const result = results[i];
-        const query = queryTargets[i];
-
-        // add search term to stream & add to array
         if (result.data) {
           for (const stream of result.data.streams || []) {
-            stream.search = query.regexp;
-            allStreams.push(stream);
+            const seriesData = logStreamToSeriesData(stream);
+            seriesData.meta = {
+              search: queryTargets[i].regexp,
+              limit: this.maxLines,
+            };
+            series.push(seriesData);
           }
         }
       }
 
-      // check resultType
-      if (options.targets[0].resultFormat === 'time_series') {
-        const logs = mergeStreamsToLogs(allStreams, this.maxLines);
-        logs.series = makeSeriesForLogs(logs.rows, options.intervalMs);
-        return { data: logs.series };
-      } else {
-        return { data: allStreams };
-      }
+      return { data: series };
     });
   }
 

+ 5 - 122
public/app/plugins/datasource/loki/result_transformer.test.ts

@@ -1,122 +1,6 @@
-import { LogsStream } from 'app/core/logs_model';
+import { logStreamToSeriesData } from './result_transformer';
 
-import { mergeStreamsToLogs, logStreamToSeriesData, seriesDataToLogStream } from './result_transformer';
-
-describe('mergeStreamsToLogs()', () => {
-  it('returns empty logs given no streams', () => {
-    expect(mergeStreamsToLogs([]).rows).toEqual([]);
-  });
-
-  it('returns processed logs from single stream', () => {
-    const stream1: LogsStream = {
-      labels: '{foo="bar"}',
-      entries: [
-        {
-          line: 'WARN boooo',
-          ts: '1970-01-01T00:00:00Z',
-        },
-      ],
-    };
-    expect(mergeStreamsToLogs([stream1]).rows).toMatchObject([
-      {
-        entry: 'WARN boooo',
-        labels: { foo: 'bar' },
-        key: 'EK1970-01-01T00:00:00Z{foo="bar"}',
-        logLevel: 'warning',
-        uniqueLabels: {},
-      },
-    ]);
-  });
-
-  it('returns merged logs from multiple streams sorted by time and with unique labels', () => {
-    const stream1: LogsStream = {
-      labels: '{foo="bar", baz="1"}',
-      entries: [
-        {
-          line: 'WARN boooo',
-          ts: '1970-01-01T00:00:01Z',
-        },
-      ],
-    };
-    const stream2: LogsStream = {
-      labels: '{foo="bar", baz="2"}',
-      entries: [
-        {
-          line: 'INFO 1',
-          ts: '1970-01-01T00:00:00Z',
-        },
-        {
-          line: 'INFO 2',
-          ts: '1970-01-01T00:00:02Z',
-        },
-      ],
-    };
-    expect(mergeStreamsToLogs([stream1, stream2]).rows).toMatchObject([
-      {
-        entry: 'INFO 2',
-        labels: { foo: 'bar', baz: '2' },
-        logLevel: 'info',
-        uniqueLabels: { baz: '2' },
-      },
-      {
-        entry: 'WARN boooo',
-        labels: { foo: 'bar', baz: '1' },
-        logLevel: 'warning',
-        uniqueLabels: { baz: '1' },
-      },
-      {
-        entry: 'INFO 1',
-        labels: { foo: 'bar', baz: '2' },
-        logLevel: 'info',
-        uniqueLabels: { baz: '2' },
-      },
-    ]);
-  });
-
-  it('detects ANSI codes', () => {
-    expect(
-      mergeStreamsToLogs([
-        {
-          labels: '{foo="bar"}',
-          entries: [
-            {
-              line: "foo: 'bar'",
-              ts: '1970-01-01T00:00:00Z',
-            },
-          ],
-        },
-        {
-          labels: '{bar="foo"}',
-          entries: [
-            {
-              line: "bar: 'foo'",
-              ts: '1970-01-01T00:00:00Z',
-            },
-          ],
-        },
-      ]).rows
-    ).toMatchObject([
-      {
-        entry: "bar: 'foo'",
-        hasAnsi: false,
-        key: 'EK1970-01-01T00:00:00Z{bar="foo"}',
-        labels: { bar: 'foo' },
-        logLevel: 'unknown',
-        raw: "bar: 'foo'",
-      },
-      {
-        entry: "foo: 'bar'",
-        hasAnsi: true,
-        key: 'EK1970-01-01T00:00:00Z{foo="bar"}',
-        labels: { foo: 'bar' },
-        logLevel: 'unknown',
-        raw: "foo: 'bar'",
-      },
-    ]);
-  });
-});
-
-describe('convert SeriesData to/from LogStream', () => {
+describe('convert loki response to SeriesData', () => {
   const streams = [
     {
       labels: '{foo="bar"}',
@@ -143,9 +27,8 @@ describe('convert SeriesData to/from LogStream', () => {
     expect(data.length).toBe(2);
     expect(data[0].labels['foo']).toEqual('bar');
     expect(data[0].rows[0][0]).toEqual(streams[0].entries[0].ts);
-
-    const roundtrip = data.map(series => seriesDataToLogStream(series));
-    expect(roundtrip.length).toBe(2);
-    expect(roundtrip[0].labels).toEqual(streams[0].labels);
+    expect(data[0].rows[0][1]).toEqual(streams[0].entries[0].line);
+    expect(data[1].rows[0][0]).toEqual(streams[1].entries[0].ts);
+    expect(data[1].rows[0][1]).toEqual(streams[1].entries[0].line);
   });
 });

+ 3 - 142
public/app/plugins/datasource/loki/result_transformer.ts

@@ -1,115 +1,7 @@
-import ansicolor from 'vendor/ansicolor/ansicolor';
-import _ from 'lodash';
-import moment from 'moment';
+import { LokiLogsStream } from './types';
+import { SeriesData, parseLabels, FieldType, Labels } from '@grafana/ui';
 
-import { LogsMetaItem, LogsModel, LogRowModel, LogsStream, LogsStreamEntry, LogsMetaKind } from 'app/core/logs_model';
-import { hasAnsiCodes } from 'app/core/utils/text';
-import { DEFAULT_MAX_LINES } from './datasource';
-
-import {
-  parseLabels,
-  SeriesData,
-  findUniqueLabels,
-  Labels,
-  findCommonLabels,
-  getLogLevel,
-  FieldType,
-  formatLabels,
-  guessFieldTypeFromSeries,
-} from '@grafana/ui';
-
-export function processEntry(
-  entry: LogsStreamEntry,
-  labels: string,
-  parsedLabels: Labels,
-  uniqueLabels: Labels,
-  search: string
-): LogRowModel {
-  const { line } = entry;
-  const ts = entry.ts || entry.timestamp;
-  // Assumes unique-ness, needs nanosec precision for timestamp
-  const key = `EK${ts}${labels}`;
-  const time = moment(ts);
-  const timeEpochMs = time.valueOf();
-  const timeFromNow = time.fromNow();
-  const timeLocal = time.format('YYYY-MM-DD HH:mm:ss');
-  const logLevel = getLogLevel(line);
-  const hasAnsi = hasAnsiCodes(line);
-
-  return {
-    key,
-    logLevel,
-    timeFromNow,
-    timeEpochMs,
-    timeLocal,
-    uniqueLabels,
-    hasAnsi,
-    entry: hasAnsi ? ansicolor.strip(line) : line,
-    raw: line,
-    labels: parsedLabels,
-    searchWords: search ? [search] : [],
-    timestamp: ts,
-  };
-}
-
-export function mergeStreamsToLogs(streams: LogsStream[], limit = DEFAULT_MAX_LINES): LogsModel {
-  // Unique model identifier
-  const id = streams.map(stream => stream.labels).join();
-
-  // Find unique labels for each stream
-  streams = streams.map(stream => ({
-    ...stream,
-    parsedLabels: parseLabels(stream.labels),
-  }));
-  const commonLabels = findCommonLabels(streams.map(model => model.parsedLabels));
-  streams = streams.map(stream => ({
-    ...stream,
-    uniqueLabels: findUniqueLabels(stream.parsedLabels, commonLabels),
-  }));
-
-  // Merge stream entries into single list of log rows
-  const sortedRows: LogRowModel[] = _.chain(streams)
-    .reduce(
-      (acc: LogRowModel[], stream: LogsStream) => [
-        ...acc,
-        ...stream.entries.map(entry =>
-          processEntry(entry, stream.labels, stream.parsedLabels, stream.uniqueLabels, stream.search)
-        ),
-      ],
-      []
-    )
-    .sortBy('timestamp')
-    .reverse()
-    .value();
-
-  const hasUniqueLabels = sortedRows && sortedRows.some(row => Object.keys(row.uniqueLabels).length > 0);
-
-  // Meta data to display in status
-  const meta: LogsMetaItem[] = [];
-  if (_.size(commonLabels) > 0) {
-    meta.push({
-      label: 'Common labels',
-      value: commonLabels,
-      kind: LogsMetaKind.LabelsMap,
-    });
-  }
-  if (limit) {
-    meta.push({
-      label: 'Limit',
-      value: `${limit} (${sortedRows.length} returned)`,
-      kind: LogsMetaKind.String,
-    });
-  }
-
-  return {
-    id,
-    hasUniqueLabels,
-    meta,
-    rows: sortedRows,
-  };
-}
-
-export function logStreamToSeriesData(stream: LogsStream): SeriesData {
+export function logStreamToSeriesData(stream: LokiLogsStream): SeriesData {
   let labels: Labels = stream.parsedLabels;
   if (!labels && stream.labels) {
     labels = parseLabels(stream.labels);
@@ -122,34 +14,3 @@ export function logStreamToSeriesData(stream: LogsStream): SeriesData {
     }),
   };
 }
-
-export function seriesDataToLogStream(series: SeriesData): LogsStream {
-  let timeIndex = -1;
-  let lineIndex = -1;
-  for (let i = 0; i < series.fields.length; i++) {
-    const field = series.fields[i];
-    const type = field.type || guessFieldTypeFromSeries(series, i);
-    if (timeIndex < 0 && type === FieldType.time) {
-      timeIndex = i;
-    }
-    if (lineIndex < 0 && type === FieldType.string) {
-      lineIndex = i;
-    }
-  }
-  if (timeIndex < 0) {
-    throw new Error('Series does not have a time field');
-  }
-  if (lineIndex < 0) {
-    throw new Error('Series does not have a line field');
-  }
-  return {
-    labels: formatLabels(series.labels),
-    parsedLabels: series.labels,
-    entries: series.rows.map(row => {
-      return {
-        line: row[lineIndex],
-        ts: row[timeIndex],
-      };
-    }),
-  };
-}

+ 15 - 3
public/app/plugins/datasource/loki/types.ts

@@ -1,8 +1,20 @@
-import { DataQuery } from '@grafana/ui/src/types';
+import { DataQuery, Labels } from '@grafana/ui/src/types';
 
 export interface LokiQuery extends DataQuery {
   expr: string;
-  resultFormat?: LokiQueryResultFormats;
 }
 
-export type LokiQueryResultFormats = 'time_series' | 'logs';
+export interface LokiLogsStream {
+  labels: string;
+  entries: LokiLogsStreamEntry[];
+  search?: string;
+  parsedLabels?: Labels;
+  uniqueLabels?: Labels;
+}
+
+export interface LokiLogsStreamEntry {
+  line: string;
+  ts: string;
+  // Legacy, was renamed to ts
+  timestamp?: string;
+}

+ 40 - 42
public/app/plugins/panel/graph2/getGraphSeriesModel.ts

@@ -1,7 +1,5 @@
 import {
   GraphSeriesXY,
-  getFirstTimeField,
-  FieldType,
   NullValueMode,
   calculateStats,
   colors,
@@ -10,6 +8,8 @@ import {
   getDisplayProcessor,
   DisplayValue,
   PanelData,
+  FieldCache,
+  FieldType,
 } from '@grafana/ui';
 import { SeriesOptions, GraphOptions } from './types';
 import { GraphLegendEditorLegendOptions } from './GraphLegendEditor';
@@ -27,54 +27,52 @@ export const getGraphSeriesModel = (
   });
 
   for (const series of data.series) {
-    const timeColumn = getFirstTimeField(series);
-    if (timeColumn < 0) {
+    const fieldCache = new FieldCache(series.fields);
+    const timeColumn = fieldCache.getFirstFieldOfType(FieldType.time);
+    if (!timeColumn) {
       continue;
     }
 
-    for (let i = 0; i < series.fields.length; i++) {
-      const field = series.fields[i];
-
-      // Show all numeric columns
-      if (field.type === FieldType.number) {
-        // Use external calculator just to make sure it works :)
-        const points = getFlotPairs({
-          series,
-          xIndex: timeColumn,
-          yIndex: i,
-          nullValueMode: NullValueMode.Null,
-        });
-
-        if (points.length > 0) {
-          const seriesStats = calculateStats({ series, stats: legendOptions.stats, fieldIndex: i });
-          let statsDisplayValues;
+    const numberFields = fieldCache.getFields(FieldType.number);
+    for (let i = 0; i < numberFields.length; i++) {
+      const field = numberFields[i];
+      // Use external calculator just to make sure it works :)
+      const points = getFlotPairs({
+        series,
+        xIndex: timeColumn.index,
+        yIndex: field.index,
+        nullValueMode: NullValueMode.Null,
+      });
 
-          if (legendOptions.stats) {
-            statsDisplayValues = legendOptions.stats.map<DisplayValue>(stat => {
-              const statDisplayValue = displayProcessor(seriesStats[stat]);
+      if (points.length > 0) {
+        const seriesStats = calculateStats({ series, stats: legendOptions.stats, fieldIndex: field.index });
+        let statsDisplayValues;
 
-              return {
-                ...statDisplayValue,
-                text: statDisplayValue.text,
-                title: stat,
-              };
-            });
-          }
+        if (legendOptions.stats) {
+          statsDisplayValues = legendOptions.stats.map<DisplayValue>(stat => {
+            const statDisplayValue = displayProcessor(seriesStats[stat]);
 
-          const seriesColor =
-            seriesOptions[field.name] && seriesOptions[field.name].color
-              ? getColorFromHexRgbOrName(seriesOptions[field.name].color)
-              : colors[graphs.length % colors.length];
-
-          graphs.push({
-            label: field.name,
-            data: points,
-            color: seriesColor,
-            info: statsDisplayValues,
-            isVisible: true,
-            yAxis: (seriesOptions[field.name] && seriesOptions[field.name].yAxis) || 1,
+            return {
+              ...statDisplayValue,
+              text: statDisplayValue.text,
+              title: stat,
+            };
            });
          }
+
+        const seriesColor =
+          seriesOptions[field.name] && seriesOptions[field.name].color
+            ? getColorFromHexRgbOrName(seriesOptions[field.name].color)
+            : colors[graphs.length % colors.length];
+
+        graphs.push({
+          label: field.name,
+          data: points,
+          color: seriesColor,
+          info: statsDisplayValues,
+          isVisible: true,
+          yAxis: (seriesOptions[field.name] && seriesOptions[field.name].yAxis) || 1,
+        });
       }
     }
   }

+ 59 - 61
public/app/plugins/panel/singlestat2/SingleStatPanel.tsx

@@ -16,9 +16,9 @@ import {
   PanelProps,
   getDisplayProcessor,
   NullValueMode,
-  FieldType,
   calculateStats,
-  getFirstTimeField,
+  FieldCache,
+  FieldType,
 } from '@grafana/ui';
 
 interface SingleStatDisplay {
@@ -51,73 +51,71 @@ export class SingleStatPanel extends PureComponent<PanelProps<SingleStatOptions>
 
     const values: SingleStatDisplay[] = [];
     for (const series of data.series) {
-      const timeColumn = sparkline.show ? getFirstTimeField(series) : -1;
+      const fieldCache = new FieldCache(series.fields);
+      const timeColumn = sparkline.show ? fieldCache.getFirstFieldOfType(FieldType.time) : null;
+      const numberFields = fieldCache.getFields(FieldType.number);
+
+      for (let i = 0; i < numberFields.length; i++) {
+        const field = numberFields[i];
+        const stats = calculateStats({
+          series,
+          fieldIndex: field.index,
+          stats: [stat], // The stats to calculate
+          nullValueMode: NullValueMode.Null,
+        });
+
+        const v: SingleStatDisplay = {
+          value: display(stats[stat]),
+        };
+        v.value.title = replaceVariables(field.name);
+
+        const color = v.value.color;
+        if (!colorValue) {
+          delete v.value.color;
+        }
+
+        if (colorBackground) {
+          v.backgroundColor = color;
+        }
+
+        if (options.valueFontSize) {
+          v.value.fontSize = options.valueFontSize;
+        }
 
 
-      for (let i = 0; i < series.fields.length; i++) {
-        const field = series.fields[i];
+        if (valueOptions.prefix) {
+          v.prefix = {
+            text: replaceVariables(valueOptions.prefix),
+            numeric: NaN,
+            color: colorPrefix ? color : null,
+            fontSize: options.prefixFontSize,
+          };
+        }
+        if (valueOptions.suffix) {
+          v.suffix = {
+            text: replaceVariables(valueOptions.suffix),
+            numeric: NaN,
+            color: colorPostfix ? color : null,
+            fontSize: options.postfixFontSize,
+          };
+        }
 
-        // Show all fields that are not 'time'
-        if (field.type === FieldType.number) {
-          const stats = calculateStats({
+        if (sparkline.show && timeColumn) {
+          const points = getFlotPairs({
             series,
-            fieldIndex: i,
-            stats: [stat], // The stats to calculate
+            xIndex: timeColumn.index,
+            yIndex: field.index,
             nullValueMode: NullValueMode.Null,
           });
 
-          const v: SingleStatDisplay = {
-            value: display(stats[stat]),
+          v.sparkline = {
+            ...sparkline,
+            data: points,
+            minX: timeRange.from.valueOf(),
+            maxX: timeRange.to.valueOf(),
           };
-          v.value.title = replaceVariables(field.name);
-
-          const color = v.value.color;
-          if (!colorValue) {
-            delete v.value.color;
-          }
-
-          if (colorBackground) {
-            v.backgroundColor = color;
-          }
-
-          if (options.valueFontSize) {
-            v.value.fontSize = options.valueFontSize;
-          }
-
-          if (valueOptions.prefix) {
-            v.prefix = {
-              text: replaceVariables(valueOptions.prefix),
-              numeric: NaN,
-              color: colorPrefix ? color : null,
-              fontSize: options.prefixFontSize,
-            };
-          }
-          if (valueOptions.suffix) {
-            v.suffix = {
-              text: replaceVariables(valueOptions.suffix),
-              numeric: NaN,
-              color: colorPostfix ? color : null,
-              fontSize: options.postfixFontSize,
-            };
-          }
-
-          if (sparkline.show && timeColumn >= 0) {
-            const points = getFlotPairs({
-              series,
-              xIndex: timeColumn,
-              yIndex: i,
-              nullValueMode: NullValueMode.Null,
-            });
-
-            v.sparkline = {
-              ...sparkline,
-              data: points,
-              minX: timeRange.from.valueOf(),
-              maxX: timeRange.to.valueOf(),
-            };
-          }
-
-          values.push(v);
         }
+
+        values.push(v);
       }
     }