sql_engine.go

package tsdb

import (
    "container/list"
    "context"
    "database/sql"
    "fmt"
    "math"
    "strings"
    "sync"
    "time"

    "github.com/go-xorm/core"
    "github.com/go-xorm/xorm"
    "github.com/grafana/grafana/pkg/components/null"
    "github.com/grafana/grafana/pkg/components/simplejson"
    "github.com/grafana/grafana/pkg/log"
    "github.com/grafana/grafana/pkg/models"
)

// SqlMacroEngine interpolates macros into sql. It takes in the Query to have access to query context and
// timeRange to be able to generate queries that use from and to.
type SqlMacroEngine interface {
    Interpolate(query *Query, timeRange *TimeRange, sql string) (string, error)
}

// SqlTableRowTransformer transforms a query result row to RowValues with proper types.
type SqlTableRowTransformer interface {
    Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (RowValues, error)
}

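// engineCacheType caches one xorm engine per datasource id together with the
// datasource version it was built for, so connections are reused across queries
// and recreated when the datasource configuration changes.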
type engineCacheType struct {
    cache    map[int64]*xorm.Engine
    versions map[int64]int
    sync.Mutex
}

var engineCache = engineCacheType{
    cache:    make(map[int64]*xorm.Engine),
    versions: make(map[int64]int),
}

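// NewXormEngine creates a new xorm engine for the given driver and connection
// string. It is declared as a variable so it can be overridden, e.g. in tests.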
var NewXormEngine = func(driverName string, connectionString string) (*xorm.Engine, error) {
    return xorm.NewEngine(driverName, connectionString)
}

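// sqlQueryEndpoint is a generic TsdbQueryEndpoint for SQL datasources; the
// database-specific parts are provided by the injected macro engine and row transformer.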
type sqlQueryEndpoint struct {
    macroEngine       SqlMacroEngine
    rowTransformer    SqlTableRowTransformer
    engine            *xorm.Engine
    timeColumnNames   []string
    metricColumnTypes []string
    log               log.Logger
}

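// SqlQueryEndpointConfiguration holds the settings used to construct a sql query endpoint.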
type SqlQueryEndpointConfiguration struct {
    DriverName        string
    Datasource        *models.DataSource
    ConnectionString  string
    TimeColumnNames   []string
    MetricColumnTypes []string
}

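// NewSqlQueryEndpoint creates a sql query endpoint for a datasource, reusing a
// cached xorm engine when the datasource version has not changed.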
var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, rowTransformer SqlTableRowTransformer, macroEngine SqlMacroEngine, log log.Logger) (TsdbQueryEndpoint, error) {
    queryEndpoint := sqlQueryEndpoint{
        rowTransformer:  rowTransformer,
        macroEngine:     macroEngine,
        timeColumnNames: []string{"time"},
        log:             log,
    }

    if len(config.TimeColumnNames) > 0 {
        queryEndpoint.timeColumnNames = config.TimeColumnNames
    }

    // propagate configured metric column types; without this the
    // metricColumnTypes field would never be populated
    if len(config.MetricColumnTypes) > 0 {
        queryEndpoint.metricColumnTypes = config.MetricColumnTypes
    }

    engineCache.Lock()
    defer engineCache.Unlock()

    if engine, present := engineCache.cache[config.Datasource.Id]; present {
        if version := engineCache.versions[config.Datasource.Id]; version == config.Datasource.Version {
            queryEndpoint.engine = engine
            return &queryEndpoint, nil
        }
    }

    engine, err := NewXormEngine(config.DriverName, config.ConnectionString)
    if err != nil {
        return nil, err
    }

    engine.SetMaxOpenConns(10)
    engine.SetMaxIdleConns(10)

    engineCache.versions[config.Datasource.Id] = config.Datasource.Version
    engineCache.cache[config.Datasource.Id] = engine
    queryEndpoint.engine = engine

    return &queryEndpoint, nil
}

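// rowLimit caps the number of rows processed for a single query before it is aborted.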
const rowLimit = 1000000

// Query is the main function for the SqlQueryEndpoint
func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *TsdbQuery) (*Response, error) {
    result := &Response{
        Results: make(map[string]*QueryResult),
    }

    session := e.engine.NewSession()
    defer session.Close()
    db := session.DB()

    for _, query := range tsdbQuery.Queries {
        rawSQL := query.Model.Get("rawSql").MustString()
        if rawSQL == "" {
            continue
        }

        queryResult := &QueryResult{Meta: simplejson.New(), RefId: query.RefId}
        result.Results[query.RefId] = queryResult

        rawSQL, err := e.macroEngine.Interpolate(query, tsdbQuery.TimeRange, rawSQL)
        if err != nil {
            queryResult.Error = err
            continue
        }

        queryResult.Meta.Set("sql", rawSQL)

        rows, err := db.Query(rawSQL)
        if err != nil {
            queryResult.Error = err
            continue
        }
        defer rows.Close()

        format := query.Model.Get("format").MustString("time_series")

        switch format {
        case "time_series":
            err := e.transformToTimeSeries(query, rows, queryResult, tsdbQuery)
            if err != nil {
                queryResult.Error = err
                continue
            }
        case "table":
            err := e.transformToTable(query, rows, queryResult, tsdbQuery)
            if err != nil {
                queryResult.Error = err
                continue
            }
        }
    }

    return result, nil
}

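// transformToTable converts the result rows into a table result; a column matching
// one of the configured time column names is converted to epoch milliseconds.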
func (e *sqlQueryEndpoint) transformToTable(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error {
    columnNames, err := rows.Columns()
    if err != nil {
        return err
    }
    columnCount := len(columnNames)

    rowCount := 0
    timeIndex := -1

    table := &Table{
        Columns: make([]TableColumn, columnCount),
        Rows:    make([]RowValues, 0),
    }

    for i, name := range columnNames {
        table.Columns[i].Text = name

        for _, tc := range e.timeColumnNames {
            if name == tc {
                timeIndex = i
                break
            }
        }
    }

    columnTypes, err := rows.ColumnTypes()
    if err != nil {
        return err
    }

    for ; rows.Next(); rowCount++ {
        if rowCount > rowLimit {
            return fmt.Errorf("query row limit exceeded, limit %d", rowLimit)
        }

        values, err := e.rowTransformer.Transform(columnTypes, rows)
        if err != nil {
            return err
        }

        // convert the time column to a unix timestamp in milliseconds
        // to make native datetime types and epoch dates work in
        // annotation and table queries
        ConvertSqlTimeColumnToEpochMs(values, timeIndex)

        table.Rows = append(table.Rows, values)
    }

    result.Tables = append(result.Tables, table)
    result.Meta.Set("rowCount", rowCount)
    return nil
}

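// transformToTimeSeries converts the result rows into time series. A time column
// is required; a column named "metric" (or the first column whose database type
// matches one of the configured metric column types) provides the series name,
// and every remaining column becomes a value column. When fill is enabled, gaps
// between points are padded with the configured fill value.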
func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error {
    pointsBySeries := make(map[string]*TimeSeries)
    seriesByQueryOrder := list.New()

    columnNames, err := rows.Columns()
    if err != nil {
        return err
    }

    columnTypes, err := rows.ColumnTypes()
    if err != nil {
        return err
    }

    rowCount := 0
    timeIndex := -1
    metricIndex := -1
    metricPrefix := false
    var metricPrefixValue string

    // check the columns of the resultset: a time column is mandatory, and the first
    // column with a metric column type is treated as the metric name unless a column
    // named metric is present
    for i, col := range columnNames {
        for _, tc := range e.timeColumnNames {
            if col == tc {
                timeIndex = i
                continue
            }
        }

        switch col {
        case "metric":
            metricIndex = i
        default:
            if metricIndex == -1 {
                columnType := columnTypes[i].DatabaseTypeName()

                for _, mct := range e.metricColumnTypes {
                    if columnType == mct {
                        metricIndex = i
                        continue
                    }
                }
            }
        }
    }

    // use the metric column as a prefix when there are multiple value columns
    if metricIndex != -1 && len(columnNames) > 3 {
        metricPrefix = true
    }

    if timeIndex == -1 {
        return fmt.Errorf("Found no column named %s", strings.Join(e.timeColumnNames, " or "))
    }

    fillMissing := query.Model.Get("fill").MustBool(false)
    var fillInterval float64
    fillValue := null.Float{}
    if fillMissing {
        fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000
        if !query.Model.Get("fillNull").MustBool(false) {
            fillValue.Float64 = query.Model.Get("fillValue").MustFloat64()
            fillValue.Valid = true
        }
    }

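    // process each row: convert the time column to epoch milliseconds, resolve the
    // metric name, and append one point per value column (filling gaps if requested)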
    for rows.Next() {
        var timestamp float64
        var value null.Float
        var metric string

        if rowCount > rowLimit {
            return fmt.Errorf("query row limit exceeded, limit %d", rowLimit)
        }

        values, err := e.rowTransformer.Transform(columnTypes, rows)
        if err != nil {
            return err
        }

        // convert the time column to a unix timestamp in milliseconds to make
        // native datetime types and epoch dates work in annotation and table queries
        ConvertSqlTimeColumnToEpochMs(values, timeIndex)

        switch columnValue := values[timeIndex].(type) {
        case int64:
            timestamp = float64(columnValue)
        case float64:
            timestamp = columnValue
        default:
            return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue)
        }

        if metricIndex >= 0 {
            if columnValue, ok := values[metricIndex].(string); ok {
                if metricPrefix {
                    metricPrefixValue = columnValue
                } else {
                    metric = columnValue
                }
            } else {
                return fmt.Errorf("Column metric must be of type %s. metric column name: %s type: %s but datatype is %T", strings.Join(e.metricColumnTypes, ", "), columnNames[metricIndex], columnTypes[metricIndex].DatabaseTypeName(), values[metricIndex])
            }
        }

        for i, col := range columnNames {
            if i == timeIndex || i == metricIndex {
                continue
            }

            if value, err = ConvertSqlValueColumnToFloat(col, values[i]); err != nil {
                return err
            }

            if metricIndex == -1 {
                metric = col
            } else if metricPrefix {
                metric = metricPrefixValue + " " + col
            }

            series, exist := pointsBySeries[metric]
            if !exist {
                series = &TimeSeries{Name: metric}
                pointsBySeries[metric] = series
                seriesByQueryOrder.PushBack(metric)
            }

            if fillMissing {
                var intervalStart float64
                if !exist {
                    intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6)
                } else {
                    intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval
                }

                // align interval start
                intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval

                for i := intervalStart; i < timestamp; i += fillInterval {
                    series.Points = append(series.Points, TimePoint{fillValue, null.FloatFrom(i)})
                    rowCount++
                }
            }

            series.Points = append(series.Points, TimePoint{value, null.FloatFrom(timestamp)})
            e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value)
        }
    }

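    // emit series in the order they were first seen and, when filling is enabled,
    // pad each series from its last point up to the end of the query time range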
    for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() {
        key := elem.Value.(string)
        result.Series = append(result.Series, pointsBySeries[key])

        if fillMissing {
            series := pointsBySeries[key]
            // fill in values from last fetched value till interval end
            intervalStart := series.Points[len(series.Points)-1][1].Float64
            intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6)

            // align interval start
            intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval

            for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval {
                series.Points = append(series.Points, TimePoint{fillValue, null.FloatFrom(i)})
                rowCount++
            }
        }
    }

    result.Meta.Set("rowCount", rowCount)
    return nil
}

// ConvertSqlTimeColumnToEpochMs converts column named time to unix timestamp in milliseconds
// to make native datetime types and epoch dates work in annotation and table queries.
func ConvertSqlTimeColumnToEpochMs(values RowValues, timeIndex int) {
    if timeIndex >= 0 {
        switch value := values[timeIndex].(type) {
        case time.Time:
            values[timeIndex] = float64(value.UnixNano()) / float64(time.Millisecond)
        case *time.Time:
            if value != nil {
                values[timeIndex] = float64((*value).UnixNano()) / float64(time.Millisecond)
            }
        case int64:
            values[timeIndex] = int64(EpochPrecisionToMs(float64(value)))
        case *int64:
            if value != nil {
                values[timeIndex] = int64(EpochPrecisionToMs(float64(*value)))
            }
        case uint64:
            values[timeIndex] = int64(EpochPrecisionToMs(float64(value)))
        case *uint64:
            if value != nil {
                values[timeIndex] = int64(EpochPrecisionToMs(float64(*value)))
            }
        case int32:
            values[timeIndex] = int64(EpochPrecisionToMs(float64(value)))
        case *int32:
            if value != nil {
                values[timeIndex] = int64(EpochPrecisionToMs(float64(*value)))
            }
        case uint32:
            values[timeIndex] = int64(EpochPrecisionToMs(float64(value)))
        case *uint32:
            if value != nil {
                values[timeIndex] = int64(EpochPrecisionToMs(float64(*value)))
            }
        case float64:
            values[timeIndex] = EpochPrecisionToMs(value)
        case *float64:
            if value != nil {
                values[timeIndex] = EpochPrecisionToMs(*value)
            }
        case float32:
            values[timeIndex] = EpochPrecisionToMs(float64(value))
        case *float32:
            if value != nil {
                values[timeIndex] = EpochPrecisionToMs(float64(*value))
            }
        }
    }
}

// ConvertSqlValueColumnToFloat converts timeseries value column to float.
func ConvertSqlValueColumnToFloat(columnName string, columnValue interface{}) (null.Float, error) {
    var value null.Float

    switch typedValue := columnValue.(type) {
    case int:
        value = null.FloatFrom(float64(typedValue))
    case *int:
        if typedValue == nil {
            value.Valid = false
        } else {
            value = null.FloatFrom(float64(*typedValue))
        }
    case int64:
        value = null.FloatFrom(float64(typedValue))
    case *int64:
        if typedValue == nil {
            value.Valid = false
        } else {
            value = null.FloatFrom(float64(*typedValue))
        }
    case int32:
        value = null.FloatFrom(float64(typedValue))
    case *int32:
        if typedValue == nil {
            value.Valid = false
        } else {
            value = null.FloatFrom(float64(*typedValue))
        }
    case int16:
        value = null.FloatFrom(float64(typedValue))
    case *int16:
        if typedValue == nil {
            value.Valid = false
        } else {
            value = null.FloatFrom(float64(*typedValue))
        }
    case int8:
        value = null.FloatFrom(float64(typedValue))
    case *int8:
        if typedValue == nil {
            value.Valid = false
        } else {
            value = null.FloatFrom(float64(*typedValue))
        }
    case uint:
        value = null.FloatFrom(float64(typedValue))
    case *uint:
        if typedValue == nil {
            value.Valid = false
        } else {
            value = null.FloatFrom(float64(*typedValue))
        }
    case uint64:
        value = null.FloatFrom(float64(typedValue))
    case *uint64:
        if typedValue == nil {
            value.Valid = false
        } else {
            value = null.FloatFrom(float64(*typedValue))
        }
    case uint32:
        value = null.FloatFrom(float64(typedValue))
    case *uint32:
        if typedValue == nil {
            value.Valid = false
        } else {
            value = null.FloatFrom(float64(*typedValue))
        }
    case uint16:
        value = null.FloatFrom(float64(typedValue))
    case *uint16:
        if typedValue == nil {
            value.Valid = false
        } else {
            value = null.FloatFrom(float64(*typedValue))
        }
    case uint8:
        value = null.FloatFrom(float64(typedValue))
    case *uint8:
        if typedValue == nil {
            value.Valid = false
        } else {
            value = null.FloatFrom(float64(*typedValue))
        }
    case float64:
        value = null.FloatFrom(typedValue)
    case *float64:
        value = null.FloatFromPtr(typedValue)
    case float32:
        value = null.FloatFrom(float64(typedValue))
    case *float32:
        if typedValue == nil {
            value.Valid = false
        } else {
            value = null.FloatFrom(float64(*typedValue))
        }
    case nil:
        value.Valid = false
    default:
        return null.NewFloat(0, false), fmt.Errorf("Value column must have numeric datatype, column: %s type: %T value: %v", columnName, typedValue, typedValue)
    }

    return value, nil
}