// sql_engine.go

package tsdb

import (
	"container/list"
	"context"
	"database/sql"
	"fmt"
	"math"
	"strings"
	"sync"
	"time"

	"github.com/go-xorm/core"
	"github.com/go-xorm/xorm"

	"github.com/grafana/grafana/pkg/components/null"
	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/log"
	"github.com/grafana/grafana/pkg/models"
)

// SqlMacroEngine interpolates macros into sql. It takes in the Query to have access to query context and
// timeRange to be able to generate queries that use from and to.
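//
// For example (illustrative only; the concrete macros are defined by each
// datasource implementation), an engine might expand
//
//	SELECT count(*) FROM logs WHERE $__timeFilter(created_at)
//
// into a query whose time predicate on created_at is built from the from and
// to values of timeRange.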
type SqlMacroEngine interface {
	Interpolate(query *Query, timeRange *TimeRange, sql string) (string, error)
}

// SqlTableRowTransformer transforms a query result row to RowValues with proper types.
type SqlTableRowTransformer interface {
	Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (RowValues, error)
}

type engineCacheType struct {
	cache    map[int64]*xorm.Engine
	versions map[int64]int
	sync.Mutex
}

var engineCache = engineCacheType{
	cache:    make(map[int64]*xorm.Engine),
	versions: make(map[int64]int),
}

var NewXormEngine = func(driverName string, connectionString string) (*xorm.Engine, error) {
	return xorm.NewEngine(driverName, connectionString)
}

type sqlQueryEndpoint struct {
	macroEngine       SqlMacroEngine
	rowTransformer    SqlTableRowTransformer
	engine            *xorm.Engine
	timeColumnNames   []string
	metricColumnTypes []string
	log               log.Logger
}

type SqlQueryEndpointConfiguration struct {
	DriverName        string
	Datasource        *models.DataSource
	ConnectionString  string
	TimeColumnNames   []string
	MetricColumnTypes []string
}

var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, rowTransformer SqlTableRowTransformer, macroEngine SqlMacroEngine, log log.Logger) (TsdbQueryEndpoint, error) {
	queryEndpoint := sqlQueryEndpoint{
		rowTransformer:  rowTransformer,
		macroEngine:     macroEngine,
		timeColumnNames: []string{"time"},
		log:             log,
	}

	if len(config.TimeColumnNames) > 0 {
		queryEndpoint.timeColumnNames = config.TimeColumnNames
	}

	if len(config.MetricColumnTypes) > 0 {
		queryEndpoint.metricColumnTypes = config.MetricColumnTypes
	}

	engineCache.Lock()
	defer engineCache.Unlock()

	if engine, present := engineCache.cache[config.Datasource.Id]; present {
		if version := engineCache.versions[config.Datasource.Id]; version == config.Datasource.Version {
			queryEndpoint.engine = engine
			return &queryEndpoint, nil
		}
	}

	engine, err := NewXormEngine(config.DriverName, config.ConnectionString)
	if err != nil {
		return nil, err
	}

	engine.SetMaxOpenConns(10)
	engine.SetMaxIdleConns(10)

	engineCache.versions[config.Datasource.Id] = config.Datasource.Version
	engineCache.cache[config.Datasource.Id] = engine
	queryEndpoint.engine = engine

	return &queryEndpoint, nil
}
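
// Minimal wiring sketch (illustrative; the driver name, connection string,
// datasource, logger, and the macroEngine/rowTransformer implementations are
// assumed to be provided by a concrete datasource package — none of them are
// defined in this file):
//
//	config := &SqlQueryEndpointConfiguration{
//		DriverName:       "mysql",
//		ConnectionString: cnnstr,
//		Datasource:       datasource,
//		TimeColumnNames:  []string{"time", "time_sec"},
//	}
//	endpoint, err := NewSqlQueryEndpoint(config, rowTransformer, macroEngine, logger)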

const rowLimit = 1000000

// Query is the main function for the SqlQueryEndpoint
func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *TsdbQuery) (*Response, error) {
	result := &Response{
		Results: make(map[string]*QueryResult),
	}

	session := e.engine.NewSession()
	defer session.Close()
	db := session.DB()

	for _, query := range tsdbQuery.Queries {
		rawSQL := query.Model.Get("rawSql").MustString()
		if rawSQL == "" {
			continue
		}

		queryResult := &QueryResult{Meta: simplejson.New(), RefId: query.RefId}
		result.Results[query.RefId] = queryResult

		rawSQL, err := e.macroEngine.Interpolate(query, tsdbQuery.TimeRange, rawSQL)
		if err != nil {
			queryResult.Error = err
			continue
		}

		queryResult.Meta.Set("sql", rawSQL)

		rows, err := db.Query(rawSQL)
		if err != nil {
			queryResult.Error = err
			continue
		}
		defer rows.Close()

		format := query.Model.Get("format").MustString("time_series")

		switch format {
		case "time_series":
			err := e.transformToTimeSeries(query, rows, queryResult, tsdbQuery)
			if err != nil {
				queryResult.Error = err
				continue
			}
		case "table":
			err := e.transformToTable(query, rows, queryResult, tsdbQuery)
			if err != nil {
				queryResult.Error = err
				continue
			}
		}
	}

	return result, nil
}
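
// For reference, the query.Model fields read above and in the transform
// functions below correspond to a panel query model along these lines
// (illustrative values; the exact JSON is produced by the datasource's query
// editor in the frontend):
//
//	{
//	  "format": "time_series",
//	  "rawSql": "SELECT ...",
//	  "fill": true,
//	  "fillInterval": 30,
//	  "fillNull": false,
//	  "fillValue": 0
//	}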

func (e *sqlQueryEndpoint) transformToTable(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error {
	columnNames, err := rows.Columns()
	columnCount := len(columnNames)

	if err != nil {
		return err
	}

	rowCount := 0
	timeIndex := -1

	table := &Table{
		Columns: make([]TableColumn, columnCount),
		Rows:    make([]RowValues, 0),
	}

	for i, name := range columnNames {
		table.Columns[i].Text = name

		for _, tc := range e.timeColumnNames {
			if name == tc {
				timeIndex = i
				break
			}
		}
	}

	columnTypes, err := rows.ColumnTypes()
	if err != nil {
		return err
	}

	for ; rows.Next(); rowCount++ {
		if rowCount > rowLimit {
			return fmt.Errorf("query row limit exceeded, limit %d", rowLimit)
		}

		values, err := e.rowTransformer.Transform(columnTypes, rows)
		if err != nil {
			return err
		}

		// converts column named time to unix timestamp in milliseconds
		// to make native mssql datetime types and epoch dates work in
		// annotation and table queries.
		ConvertSqlTimeColumnToEpochMs(values, timeIndex)
		table.Rows = append(table.Rows, values)
	}

	result.Tables = append(result.Tables, table)
	result.Meta.Set("rowCount", rowCount)
	return nil
}
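
// Illustrative example (hypothetical resultset): a query returning columns
// (time, hostname, value) yields one Table with three TableColumns and one
// RowValues slice per row, with the time cell rewritten to epoch milliseconds
// by ConvertSqlTimeColumnToEpochMs above.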

func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error {
	pointsBySeries := make(map[string]*TimeSeries)
	seriesByQueryOrder := list.New()

	columnNames, err := rows.Columns()
	if err != nil {
		return err
	}

	columnTypes, err := rows.ColumnTypes()
	if err != nil {
		return err
	}

	rowCount := 0
	timeIndex := -1
	metricIndex := -1
	metricPrefix := false
	var metricPrefixValue string

	// check columns of resultset: a column named time is mandatory
	// the first text column is treated as metric name unless a column named metric is present
	for i, col := range columnNames {
		for _, tc := range e.timeColumnNames {
			if col == tc {
				timeIndex = i
				continue
			}
		}

		switch col {
		case "metric":
			metricIndex = i
		default:
			if metricIndex == -1 {
				columnType := columnTypes[i].DatabaseTypeName()

				for _, mct := range e.metricColumnTypes {
					if columnType == mct {
						metricIndex = i
						continue
					}
				}
			}
		}
	}

	// use metric column as prefix with multiple value columns
	if metricIndex != -1 && len(columnNames) > 3 {
		metricPrefix = true
	}

	if timeIndex == -1 {
		return fmt.Errorf("Found no column named %s", strings.Join(e.timeColumnNames, " or "))
	}
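
	// Illustrative example of the detection above (hypothetical resultset):
	// for columns (time, hostname, load1, load5) where hostname has one of the
	// configured metric column types, timeIndex becomes 0, metricIndex becomes
	// 1, and metricPrefix is set because there is more than one value column,
	// producing series names such as "web-01 load1".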

	fillMissing := query.Model.Get("fill").MustBool(false)
	var fillInterval float64
	fillValue := null.Float{}
	if fillMissing {
		fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000
		if !query.Model.Get("fillNull").MustBool(false) {
			fillValue.Float64 = query.Model.Get("fillValue").MustFloat64()
			fillValue.Valid = true
		}
	}
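
	// Gap-filling sketch (illustrative numbers): with a "fillInterval" of 30
	// seconds in the query model, fillInterval is 30000 ms here. If two
	// consecutive, aligned rows are 90 seconds apart, the loops below insert
	// two extra points at 30-second steps, each carrying either null
	// (fillNull) or the configured "fillValue".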

	for rows.Next() {
		var timestamp float64
		var value null.Float
		var metric string

		if rowCount > rowLimit {
			return fmt.Errorf("query row limit exceeded, limit %d", rowLimit)
		}

		values, err := e.rowTransformer.Transform(columnTypes, rows)
		if err != nil {
			return err
		}

		// converts column named time to unix timestamp in milliseconds to make
		// native mysql datetime types and epoch dates work in
		// annotation and table queries.
		ConvertSqlTimeColumnToEpochMs(values, timeIndex)

		switch columnValue := values[timeIndex].(type) {
		case int64:
			timestamp = float64(columnValue)
		case float64:
			timestamp = columnValue
		default:
			return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue)
		}

		if metricIndex >= 0 {
			if columnValue, ok := values[metricIndex].(string); ok {
				if metricPrefix {
					metricPrefixValue = columnValue
				} else {
					metric = columnValue
				}
			} else {
				return fmt.Errorf("Column metric must be of type %s. metric column name: %s type: %s but datatype is %T", strings.Join(e.metricColumnTypes, ", "), columnNames[metricIndex], columnTypes[metricIndex].DatabaseTypeName(), values[metricIndex])
			}
		}

		for i, col := range columnNames {
			if i == timeIndex || i == metricIndex {
				continue
			}

			if value, err = ConvertSqlValueColumnToFloat(col, values[i]); err != nil {
				return err
			}

			if metricIndex == -1 {
				metric = col
			} else if metricPrefix {
				metric = metricPrefixValue + " " + col
			}

			series, exist := pointsBySeries[metric]
			if !exist {
				series = &TimeSeries{Name: metric}
				pointsBySeries[metric] = series
				seriesByQueryOrder.PushBack(metric)
			}

			if fillMissing {
				var intervalStart float64
				if !exist {
					intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6)
				} else {
					intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval
				}

				// align interval start
				intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval

				for i := intervalStart; i < timestamp; i += fillInterval {
					series.Points = append(series.Points, TimePoint{fillValue, null.FloatFrom(i)})
					rowCount++
				}
			}

			series.Points = append(series.Points, TimePoint{value, null.FloatFrom(timestamp)})

			e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value)
		}
	}

	for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() {
		key := elem.Value.(string)
		result.Series = append(result.Series, pointsBySeries[key])

		if fillMissing {
			series := pointsBySeries[key]
			// fill in values from last fetched value till interval end
			intervalStart := series.Points[len(series.Points)-1][1].Float64
			intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6)

			// align interval start
			intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
			for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval {
				series.Points = append(series.Points, TimePoint{fillValue, null.FloatFrom(i)})
				rowCount++
			}
		}
	}

	result.Meta.Set("rowCount", rowCount)
	return nil
}

// ConvertSqlTimeColumnToEpochMs converts column named time to unix timestamp in milliseconds
// to make native datetime types and epoch dates work in annotation and table queries.
func ConvertSqlTimeColumnToEpochMs(values RowValues, timeIndex int) {
	if timeIndex >= 0 {
		switch value := values[timeIndex].(type) {
		case time.Time:
			values[timeIndex] = float64(value.UnixNano()) / float64(time.Millisecond)
		case *time.Time:
			if value != nil {
				values[timeIndex] = float64((*value).UnixNano()) / float64(time.Millisecond)
			}
		case int64:
			values[timeIndex] = int64(EpochPrecisionToMs(float64(value)))
		case *int64:
			if value != nil {
				values[timeIndex] = int64(EpochPrecisionToMs(float64(*value)))
			}
		case uint64:
			values[timeIndex] = int64(EpochPrecisionToMs(float64(value)))
		case *uint64:
			if value != nil {
				values[timeIndex] = int64(EpochPrecisionToMs(float64(*value)))
			}
		case int32:
			values[timeIndex] = int64(EpochPrecisionToMs(float64(value)))
		case *int32:
			if value != nil {
				values[timeIndex] = int64(EpochPrecisionToMs(float64(*value)))
			}
		case uint32:
			values[timeIndex] = int64(EpochPrecisionToMs(float64(value)))
		case *uint32:
			if value != nil {
				values[timeIndex] = int64(EpochPrecisionToMs(float64(*value)))
			}
		case float64:
			values[timeIndex] = EpochPrecisionToMs(value)
		case *float64:
			if value != nil {
				values[timeIndex] = EpochPrecisionToMs(*value)
			}
		case float32:
			values[timeIndex] = EpochPrecisionToMs(float64(value))
		case *float32:
			if value != nil {
				values[timeIndex] = EpochPrecisionToMs(float64(*value))
			}
		}
	}
}
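
// Illustrative conversions (assuming EpochPrecisionToMs, defined elsewhere in
// this package, normalizes epoch values of differing precision to milliseconds):
//
//	values := RowValues{time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC)}
//	ConvertSqlTimeColumnToEpochMs(values, 0) // values[0] is now 1.5147648e+12 (float64 ms)
//
//	values = RowValues{int64(1514764800)} // epoch seconds
//	ConvertSqlTimeColumnToEpochMs(values, 0) // values[0] becomes an int64 in ms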

// ConvertSqlValueColumnToFloat converts timeseries value column to float.
func ConvertSqlValueColumnToFloat(columnName string, columnValue interface{}) (null.Float, error) {
	var value null.Float

	switch typedValue := columnValue.(type) {
	case int:
		value = null.FloatFrom(float64(typedValue))
	case *int:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case int64:
		value = null.FloatFrom(float64(typedValue))
	case *int64:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case int32:
		value = null.FloatFrom(float64(typedValue))
	case *int32:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case int16:
		value = null.FloatFrom(float64(typedValue))
	case *int16:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case int8:
		value = null.FloatFrom(float64(typedValue))
	case *int8:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case uint:
		value = null.FloatFrom(float64(typedValue))
	case *uint:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case uint64:
		value = null.FloatFrom(float64(typedValue))
	case *uint64:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case uint32:
		value = null.FloatFrom(float64(typedValue))
	case *uint32:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case uint16:
		value = null.FloatFrom(float64(typedValue))
	case *uint16:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case uint8:
		value = null.FloatFrom(float64(typedValue))
	case *uint8:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case float64:
		value = null.FloatFrom(typedValue)
	case *float64:
		value = null.FloatFromPtr(typedValue)
	case float32:
		value = null.FloatFrom(float64(typedValue))
	case *float32:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case nil:
		value.Valid = false
	default:
		return null.NewFloat(0, false), fmt.Errorf("Value column must have numeric datatype, column: %s type: %T value: %v", columnName, typedValue, typedValue)
	}

	return value, nil
}
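
// Illustrative behaviour (hypothetical inputs):
//
//	v, _ := ConvertSqlValueColumnToFloat("value", int64(42))       // v holds 42.0 and is valid
//	v, _ = ConvertSqlValueColumnToFloat("value", (*float64)(nil))  // v is null (Valid == false)
//	_, err := ConvertSqlValueColumnToFloat("value", "not a number") // err: non-numeric datatype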