sql_engine.go

package tsdb

import (
	"container/list"
	"context"
	"database/sql"
	"fmt"
	"math"
	"strings"
	"sync"
	"time"

	"github.com/go-xorm/core"
	"github.com/go-xorm/xorm"

	"github.com/grafana/grafana/pkg/components/null"
	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/log"
	"github.com/grafana/grafana/pkg/models"
)

// SqlMacroEngine interpolates macros into sql. It takes in the Query to have access to query context and
// timeRange to be able to generate queries that use from and to.
type SqlMacroEngine interface {
	Interpolate(query *Query, timeRange *TimeRange, sql string) (string, error)
}

// SqlTableRowTransformer transforms a query result row to RowValues with proper types.
type SqlTableRowTransformer interface {
	Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (RowValues, error)
}
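
// engineCacheType holds one xorm engine per datasource id, guarded by a mutex,
// so database connections are reused across requests. A cached engine is replaced
// when the datasource version changes (e.g. after its settings are updated).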
type engineCacheType struct {
	cache    map[int64]*xorm.Engine
	versions map[int64]int
	sync.Mutex
}

var engineCache = engineCacheType{
	cache:    make(map[int64]*xorm.Engine),
	versions: make(map[int64]int),
}
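
// NewXormEngine creates a new xorm engine for the given driver and connection string.
// Declared as a variable so it can be replaced in tests.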
var NewXormEngine = func(driverName string, connectionString string) (*xorm.Engine, error) {
	return xorm.NewEngine(driverName, connectionString)
}

type sqlQueryEndpoint struct {
	macroEngine       SqlMacroEngine
	rowTransformer    SqlTableRowTransformer
	engine            *xorm.Engine
	timeColumnNames   []string
	metricColumnTypes []string
	log               log.Logger
}

type SqlQueryEndpointConfiguration struct {
	DriverName        string
	Datasource        *models.DataSource
	ConnectionString  string
	TimeColumnNames   []string
	MetricColumnTypes []string
}
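
// NewSqlQueryEndpoint creates a sql query endpoint for the given driver and datasource,
// reusing a cached xorm engine when one exists for the same datasource id and version.
// Declared as a variable so it can be replaced in tests.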
var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, rowTransformer SqlTableRowTransformer, macroEngine SqlMacroEngine, log log.Logger) (TsdbQueryEndpoint, error) {
	queryEndpoint := sqlQueryEndpoint{
		rowTransformer:  rowTransformer,
		macroEngine:     macroEngine,
		timeColumnNames: []string{"time"},
		log:             log,
	}

	if len(config.TimeColumnNames) > 0 {
		queryEndpoint.timeColumnNames = config.TimeColumnNames
	}

	if len(config.MetricColumnTypes) > 0 {
		queryEndpoint.metricColumnTypes = config.MetricColumnTypes
	}

	engineCache.Lock()
	defer engineCache.Unlock()

	if engine, present := engineCache.cache[config.Datasource.Id]; present {
		if version := engineCache.versions[config.Datasource.Id]; version == config.Datasource.Version {
			queryEndpoint.engine = engine
			return &queryEndpoint, nil
		}
	}

	engine, err := NewXormEngine(config.DriverName, config.ConnectionString)
	if err != nil {
		return nil, err
	}
	engine.SetMaxOpenConns(10)
	engine.SetMaxIdleConns(10)

	engineCache.versions[config.Datasource.Id] = config.Datasource.Version
	engineCache.cache[config.Datasource.Id] = engine
	queryEndpoint.engine = engine

	return &queryEndpoint, nil
}
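
// A rough usage sketch of how a driver-specific backend might wire this up
// (the driver name, connection string, column types and the rowTransformer,
// macroEngine and logger values below are illustrative, not part of this file):
//
//	config := &SqlQueryEndpointConfiguration{
//		DriverName:        "mysql",
//		ConnectionString:  cnnstr,
//		Datasource:        datasource,
//		MetricColumnTypes: []string{"CHAR", "VARCHAR", "TINYTEXT", "TEXT"},
//	}
//	endpoint, err := NewSqlQueryEndpoint(config, rowTransformer, macroEngine, logger)

// rowLimit caps how many rows a single query may return before the transform
// functions abort with an error.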
const rowLimit = 1000000

// Query is the main function for the SqlQueryEndpoint
func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *TsdbQuery) (*Response, error) {
	result := &Response{
		Results: make(map[string]*QueryResult),
	}

	session := e.engine.NewSession()
	defer session.Close()
	db := session.DB()

	for _, query := range tsdbQuery.Queries {
		rawSQL := query.Model.Get("rawSql").MustString()
		if rawSQL == "" {
			continue
		}

		queryResult := &QueryResult{Meta: simplejson.New(), RefId: query.RefId}
		result.Results[query.RefId] = queryResult

		rawSQL, err := e.macroEngine.Interpolate(query, tsdbQuery.TimeRange, rawSQL)
		if err != nil {
			queryResult.Error = err
			continue
		}

		queryResult.Meta.Set("sql", rawSQL)

		rows, err := db.Query(rawSQL)
		if err != nil {
			queryResult.Error = err
			continue
		}
		defer rows.Close()

		format := query.Model.Get("format").MustString("time_series")

		switch format {
		case "time_series":
			err := e.transformToTimeSeries(query, rows, queryResult, tsdbQuery)
			if err != nil {
				queryResult.Error = err
				continue
			}
		case "table":
			err := e.transformToTable(query, rows, queryResult, tsdbQuery)
			if err != nil {
				queryResult.Error = err
				continue
			}
		}
	}

	return result, nil
}
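
// transformToTable converts the result rows into a single table result, turning any
// recognized time column into a unix timestamp in milliseconds.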
func (e *sqlQueryEndpoint) transformToTable(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error {
	columnNames, err := rows.Columns()
	columnCount := len(columnNames)

	if err != nil {
		return err
	}

	rowCount := 0
	timeIndex := -1

	table := &Table{
		Columns: make([]TableColumn, columnCount),
		Rows:    make([]RowValues, 0),
	}

	for i, name := range columnNames {
		table.Columns[i].Text = name

		for _, tc := range e.timeColumnNames {
			if name == tc {
				timeIndex = i
				break
			}
		}
	}

	columnTypes, err := rows.ColumnTypes()
	if err != nil {
		return err
	}

	for ; rows.Next(); rowCount++ {
		if rowCount > rowLimit {
			return fmt.Errorf("query row limit exceeded, limit %d", rowLimit)
		}

		values, err := e.rowTransformer.Transform(columnTypes, rows)
		if err != nil {
			return err
		}

		// converts column named time to unix timestamp in milliseconds
		// to make native mssql datetime types and epoch dates work in
		// annotation and table queries.
		ConvertSqlTimeColumnToEpochMs(values, timeIndex)

		table.Rows = append(table.Rows, values)
	}

	result.Tables = append(result.Tables, table)
	result.Meta.Set("rowCount", rowCount)
	return nil
}
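
// transformToTimeSeries converts the result rows into one time series per metric,
// optionally filling gaps between data points when the query has fill enabled.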
func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error {
	pointsBySeries := make(map[string]*TimeSeries)
	seriesByQueryOrder := list.New()

	columnNames, err := rows.Columns()
	if err != nil {
		return err
	}

	columnTypes, err := rows.ColumnTypes()
	if err != nil {
		return err
	}

	rowCount := 0
	timeIndex := -1
	metricIndex := -1
	metricPrefix := false
	var metricPrefixValue string

	// check columns of resultset: a column named time is mandatory;
	// the first column whose database type is in metricColumnTypes is treated as
	// the metric name unless a column named metric is present
	for i, col := range columnNames {
		for _, tc := range e.timeColumnNames {
			if col == tc {
				timeIndex = i
				continue
			}
		}
		switch col {
		case "metric":
			metricIndex = i
		default:
			if metricIndex == -1 {
				columnType := columnTypes[i].DatabaseTypeName()

				for _, mct := range e.metricColumnTypes {
					e.log.Info(mct)
					if columnType == mct {
						metricIndex = i
						continue
					}
				}
			}
		}
	}

	// use metric column as prefix with multiple value columns
	if metricIndex != -1 && len(columnNames) > 3 {
		metricPrefix = true
	}

	if timeIndex == -1 {
		return fmt.Errorf("Found no column named %s", strings.Join(e.timeColumnNames, " or "))
	}

	fillMissing := query.Model.Get("fill").MustBool(false)
	var fillInterval float64
	fillValue := null.Float{}
	if fillMissing {
		fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000
		if !query.Model.Get("fillNull").MustBool(false) {
			fillValue.Float64 = query.Model.Get("fillValue").MustFloat64()
			fillValue.Valid = true
		}
	}
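
	// iterate over the result rows, grouping values into one series per metric and,
	// when fill is enabled, inserting fill points for gaps between consecutive points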
	for rows.Next() {
		var timestamp float64
		var value null.Float
		var metric string

		if rowCount > rowLimit {
			return fmt.Errorf("query row limit exceeded, limit %d", rowLimit)
		}

		values, err := e.rowTransformer.Transform(columnTypes, rows)
		if err != nil {
			return err
		}

		// converts column named time to unix timestamp in milliseconds to make
		// native mysql datetime types and epoch dates work in
		// annotation and table queries.
		ConvertSqlTimeColumnToEpochMs(values, timeIndex)

		switch columnValue := values[timeIndex].(type) {
		case int64:
			timestamp = float64(columnValue)
		case float64:
			timestamp = columnValue
		default:
			return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue)
		}

		if metricIndex >= 0 {
			if columnValue, ok := values[metricIndex].(string); ok {
				if metricPrefix {
					metricPrefixValue = columnValue
				} else {
					metric = columnValue
				}
			} else {
				return fmt.Errorf("Column metric must be of type %s. metric column name: %s type: %s but datatype is %T", strings.Join(e.metricColumnTypes, ", "), columnNames[metricIndex], columnTypes[metricIndex].DatabaseTypeName(), values[metricIndex])
			}
		}

		for i, col := range columnNames {
			if i == timeIndex || i == metricIndex {
				continue
			}

			if value, err = ConvertSqlValueColumnToFloat(col, values[i]); err != nil {
				return err
			}

			if metricIndex == -1 {
				metric = col
			} else if metricPrefix {
				metric = metricPrefixValue + " " + col
			}

			series, exist := pointsBySeries[metric]
			if !exist {
				series = &TimeSeries{Name: metric}
				pointsBySeries[metric] = series
				seriesByQueryOrder.PushBack(metric)
			}

			if fillMissing {
				var intervalStart float64
				if !exist {
					intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6)
				} else {
					intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval
				}

				// align interval start
				intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval

				for i := intervalStart; i < timestamp; i += fillInterval {
					series.Points = append(series.Points, TimePoint{fillValue, null.FloatFrom(i)})
					rowCount++
				}
			}

			series.Points = append(series.Points, TimePoint{value, null.FloatFrom(timestamp)})
			e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value)
		}
	}
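
	// emit series in the order they were first seen; when fill is enabled, pad each
	// series from its last data point out to the end of the query time range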
	for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() {
		key := elem.Value.(string)
		result.Series = append(result.Series, pointsBySeries[key])

		if fillMissing {
			series := pointsBySeries[key]
			// fill in values from last fetched value till interval end
			intervalStart := series.Points[len(series.Points)-1][1].Float64
			intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6)

			// align interval start
			intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
			for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval {
				series.Points = append(series.Points, TimePoint{fillValue, null.FloatFrom(i)})
				rowCount++
			}
		}
	}

	result.Meta.Set("rowCount", rowCount)
	return nil
}

// ConvertSqlTimeColumnToEpochMs converts column named time to unix timestamp in milliseconds
// to make native datetime types and epoch dates work in annotation and table queries.
func ConvertSqlTimeColumnToEpochMs(values RowValues, timeIndex int) {
	if timeIndex >= 0 {
		switch value := values[timeIndex].(type) {
		case time.Time:
			values[timeIndex] = float64(value.UnixNano()) / float64(time.Millisecond)
		case *time.Time:
			if value != nil {
				values[timeIndex] = float64((*value).UnixNano()) / float64(time.Millisecond)
			}
		case int64:
			values[timeIndex] = int64(EpochPrecisionToMs(float64(value)))
		case *int64:
			if value != nil {
				values[timeIndex] = int64(EpochPrecisionToMs(float64(*value)))
			}
		case uint64:
			values[timeIndex] = int64(EpochPrecisionToMs(float64(value)))
		case *uint64:
			if value != nil {
				values[timeIndex] = int64(EpochPrecisionToMs(float64(*value)))
			}
		case int32:
			values[timeIndex] = int64(EpochPrecisionToMs(float64(value)))
		case *int32:
			if value != nil {
				values[timeIndex] = int64(EpochPrecisionToMs(float64(*value)))
			}
		case uint32:
			values[timeIndex] = int64(EpochPrecisionToMs(float64(value)))
		case *uint32:
			if value != nil {
				values[timeIndex] = int64(EpochPrecisionToMs(float64(*value)))
			}
		case float64:
			values[timeIndex] = EpochPrecisionToMs(value)
		case *float64:
			if value != nil {
				values[timeIndex] = EpochPrecisionToMs(*value)
			}
		case float32:
			values[timeIndex] = EpochPrecisionToMs(float64(value))
		case *float32:
			if value != nil {
				values[timeIndex] = EpochPrecisionToMs(float64(*value))
			}
		}
	}
}

// ConvertSqlValueColumnToFloat converts timeseries value column to float.
func ConvertSqlValueColumnToFloat(columnName string, columnValue interface{}) (null.Float, error) {
	var value null.Float

	switch typedValue := columnValue.(type) {
	case int:
		value = null.FloatFrom(float64(typedValue))
	case *int:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case int64:
		value = null.FloatFrom(float64(typedValue))
	case *int64:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case int32:
		value = null.FloatFrom(float64(typedValue))
	case *int32:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case int16:
		value = null.FloatFrom(float64(typedValue))
	case *int16:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case int8:
		value = null.FloatFrom(float64(typedValue))
	case *int8:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case uint:
		value = null.FloatFrom(float64(typedValue))
	case *uint:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case uint64:
		value = null.FloatFrom(float64(typedValue))
	case *uint64:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case uint32:
		value = null.FloatFrom(float64(typedValue))
	case *uint32:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case uint16:
		value = null.FloatFrom(float64(typedValue))
	case *uint16:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case uint8:
		value = null.FloatFrom(float64(typedValue))
	case *uint8:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case float64:
		value = null.FloatFrom(typedValue)
	case *float64:
		value = null.FloatFromPtr(typedValue)
	case float32:
		value = null.FloatFrom(float64(typedValue))
	case *float32:
		if typedValue == nil {
			value.Valid = false
		} else {
			value = null.FloatFrom(float64(*typedValue))
		}
	case nil:
		value.Valid = false
	default:
		return null.NewFloat(0, false), fmt.Errorf("Value column must have numeric datatype, column: %s type: %T value: %v", columnName, typedValue, typedValue)
	}

	return value, nil
}