2025-07-30 20:47:25 +08:00
package postgres
import (
2025-09-05 14:39:56 +08:00
"encoding/json"
2025-07-30 20:47:25 +08:00
"fmt"
"math/rand"
2025-09-26 22:23:40 +08:00
"os"
2025-07-30 20:47:25 +08:00
"strings"
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/tsdb/grafana-postgresql-datasource/sqleng"
2025-09-08 21:49:49 +08:00
"github.com/grafana/grafana/pkg/util/testutil"
2025-07-30 20:47:25 +08:00
_ "github.com/lib/pq"
)
// Test generateConnectionString.
func TestIntegrationGenerateConnectionStringPGX ( t * testing . T ) {
2025-09-08 21:49:49 +08:00
testutil . SkipIntegrationTestInShortMode ( t )
2025-07-30 20:47:25 +08:00
testCases := [ ] struct {
desc string
host string
user string
password string
database string
tlsSettings tlsSettings
expConnStr string
expErr string
uid string
} {
{
desc : "Unix socket host" ,
host : "/var/run/postgresql" ,
user : "user" ,
password : "password" ,
database : "database" ,
tlsSettings : tlsSettings { Mode : "verify-full" } ,
2025-09-26 22:23:40 +08:00
expConnStr : "user='user' host='/var/run/postgresql' dbname='database' password='password' sslmode='verify-full'" ,
2025-07-30 20:47:25 +08:00
} ,
{
desc : "TCP host" ,
host : "host" ,
user : "user" ,
password : "password" ,
database : "database" ,
tlsSettings : tlsSettings { Mode : "verify-full" } ,
2025-09-26 22:23:40 +08:00
expConnStr : "user='user' host='host' dbname='database' password='password' sslmode='verify-full'" ,
2025-07-30 20:47:25 +08:00
} ,
{
desc : "verify-ca automatically adds disable-sni" ,
host : "host:1234" ,
user : "user" ,
password : "password" ,
database : "database" ,
tlsSettings : tlsSettings { Mode : "verify-ca" } ,
2025-09-26 22:23:40 +08:00
expConnStr : "user='user' host='host' dbname='database' password='password' port=1234 sslmode='verify-ca' sslsni=0" ,
2025-07-30 20:47:25 +08:00
} ,
{
desc : "TCP/port host" ,
host : "host:1234" ,
user : "user" ,
password : "password" ,
database : "database" ,
tlsSettings : tlsSettings { Mode : "verify-full" } ,
2025-09-26 22:23:40 +08:00
expConnStr : "user='user' host='host' dbname='database' password='password' port=1234 sslmode='verify-full'" ,
2025-07-30 20:47:25 +08:00
} ,
{
desc : "Ipv6 host" ,
host : "[::1]" ,
user : "user" ,
password : "password" ,
database : "database" ,
tlsSettings : tlsSettings { Mode : "verify-full" } ,
2025-09-26 22:23:40 +08:00
expConnStr : "user='user' host='::1' dbname='database' password='password' sslmode='verify-full'" ,
2025-07-30 20:47:25 +08:00
} ,
{
desc : "Ipv6/port host" ,
host : "[::1]:1234" ,
user : "user" ,
password : "password" ,
database : "database" ,
tlsSettings : tlsSettings { Mode : "verify-full" } ,
2025-09-26 22:23:40 +08:00
expConnStr : "user='user' host='::1' dbname='database' password='password' port=1234 sslmode='verify-full'" ,
2025-07-30 20:47:25 +08:00
} ,
{
desc : "Invalid port" ,
host : "host:invalid" ,
user : "user" ,
database : "database" ,
tlsSettings : tlsSettings { } ,
expErr : "error parsing postgres url" ,
} ,
{
desc : "Password with single quote and backslash" ,
host : "host" ,
user : "user" ,
password : ` p'\assword ` ,
database : "database" ,
tlsSettings : tlsSettings { Mode : "verify-full" } ,
2025-09-26 22:23:40 +08:00
expConnStr : ` user='user' host='host' dbname='database' password='p\'\\assword' sslmode='verify-full' ` ,
2025-07-30 20:47:25 +08:00
} ,
{
desc : "User/DB with single quote and backslash" ,
host : "host" ,
user : ` u'\ser ` ,
password : ` password ` ,
database : ` d'\atabase ` ,
tlsSettings : tlsSettings { Mode : "verify-full" } ,
2025-09-26 22:23:40 +08:00
expConnStr : ` user='u\'\\ser' host='host' dbname='d\'\\atabase' password='password' sslmode='verify-full' ` ,
2025-07-30 20:47:25 +08:00
} ,
{
desc : "Custom TLS mode disabled" ,
host : "host" ,
user : "user" ,
password : "password" ,
database : "database" ,
tlsSettings : tlsSettings { Mode : "disable" } ,
2025-09-26 22:23:40 +08:00
expConnStr : "user='user' host='host' dbname='database' password='password' sslmode='disable'" ,
2025-07-30 20:47:25 +08:00
} ,
{
desc : "Custom TLS mode verify-full with certificate files" ,
host : "host" ,
user : "user" ,
password : "password" ,
database : "database" ,
tlsSettings : tlsSettings {
Mode : "verify-full" ,
RootCertFile : "i/am/coding/ca.crt" ,
CertFile : "i/am/coding/client.crt" ,
CertKeyFile : "i/am/coding/client.key" ,
} ,
2025-09-26 22:23:40 +08:00
expConnStr : "user='user' host='host' dbname='database' password='password' sslmode='verify-full' " +
2025-07-30 20:47:25 +08:00
"sslrootcert='i/am/coding/ca.crt' sslcert='i/am/coding/client.crt' sslkey='i/am/coding/client.key'" ,
} ,
2025-09-26 22:23:40 +08:00
{
desc : "No password" ,
host : "host" ,
user : "user" ,
password : "" ,
database : "database" ,
tlsSettings : tlsSettings { Mode : "verify-full" } ,
expConnStr : "user='user' host='host' dbname='database' sslmode='verify-full'" ,
} ,
2025-07-30 20:47:25 +08:00
}
for _ , tt := range testCases {
t . Run ( tt . desc , func ( t * testing . T ) {
ds := sqleng . DataSourceInfo {
URL : tt . host ,
User : tt . user ,
DecryptedSecureJSONData : map [ string ] string { "password" : tt . password } ,
Database : tt . database ,
UID : tt . uid ,
}
2025-10-02 20:12:47 +08:00
logger := backend . NewLoggerWith ( "logger" , "tsdb.postgres" )
2025-07-30 20:47:25 +08:00
2025-10-02 20:12:47 +08:00
connStr , err := generateConnectionString ( ds , tt . tlsSettings , false , logger )
2025-07-30 20:47:25 +08:00
if tt . expErr == "" {
require . NoError ( t , err , tt . desc )
assert . Equal ( t , tt . expConnStr , connStr )
} else {
require . Error ( t , err , tt . desc )
assert . True ( t , strings . HasPrefix ( err . Error ( ) , tt . expErr ) ,
fmt . Sprintf ( "%s: %q doesn't start with %q" , tt . desc , err , tt . expErr ) )
}
} )
}
}
// To run this test, set runPostgresTests=true
// Or from the commandline: GRAFANA_TEST_DB=postgres go test -tags=integration -v ./pkg/tsdb/grafana-postgresql-datasource
// The tests require a PostgreSQL db named grafanadstest and a user/password grafanatest/grafanatest!
// Use the docker/blocks/postgres_tests/docker-compose.yaml to spin up a
// preconfigured Postgres server suitable for running these tests.
// There is also a datasource and dashboard provisioned by devenv scripts that you can
// use to verify that the generated data are visualized as expected, see
// devenv/README.md for setup instructions.
func TestIntegrationPostgresPGX ( t * testing . T ) {
2025-09-08 21:49:49 +08:00
testutil . SkipIntegrationTestInShortMode ( t )
2025-07-30 20:47:25 +08:00
// change to true to run the PostgreSQL tests
const runPostgresTests = false
if ! isTestDbPostgres ( ) && ! runPostgresTests {
t . Skip ( )
}
origInterpolate := sqleng . Interpolate
t . Cleanup ( func ( ) {
sqleng . Interpolate = origInterpolate
} )
sqleng . Interpolate = func ( query backend . DataQuery , timeRange backend . TimeRange , timeInterval string , sql string ) string {
return sql
}
jsonData := sqleng . JsonData {
MaxOpenConns : 10 ,
MaxIdleConns : 2 ,
ConnMaxLifetime : 14400 ,
Timescaledb : false ,
Mode : "disable" ,
ConfigurationMethod : "file-path" ,
}
dsInfo := sqleng . DataSourceInfo {
JsonData : jsonData ,
DecryptedSecureJSONData : map [ string ] string { } ,
}
logger := backend . NewLoggerWith ( "logger" , "postgres.test" )
cnnstr := postgresTestDBConnString ( )
2025-08-25 19:31:57 +08:00
p , exe , err := newPostgresPGX ( t . Context ( ) , "error" , 10000 , dsInfo , cnnstr , logger , backend . DataSourceInstanceSettings { } )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
fromStart := time . Date ( 2018 , 3 , 15 , 13 , 0 , 0 , 0 , time . UTC ) . In ( time . Local )
t . Run ( "Given a table with different native data types" , func ( t * testing . T ) {
sql := `
DROP TABLE IF EXISTS postgres_types ;
CREATE TABLE postgres_types (
c00_smallint smallint ,
c01_integer integer ,
c02_bigint bigint ,
c03_real real ,
c04_double double precision ,
c05_decimal decimal ( 10 , 2 ) ,
c06_numeric numeric ( 10 , 2 ) ,
c07_char char ( 10 ) ,
c08_varchar varchar ( 10 ) ,
c09_text text ,
c10_timestamp timestamp without time zone ,
c11_timestamptz timestamp with time zone ,
c12_date date ,
c13_time time without time zone ,
c14_timetz time with time zone ,
time date ,
c15_interval interval ,
2025-09-05 14:39:56 +08:00
c16_smallint smallint ,
c17_json json ,
c18_jsonb jsonb
2025-07-30 20:47:25 +08:00
) ;
`
2025-08-25 19:31:57 +08:00
_ , err := p . Exec ( t . Context ( ) , sql )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
sql = `
INSERT INTO postgres_types VALUES (
1 , 2 , 3 ,
4.5 , 6.7 , 1.1 , 1.2 ,
' char10 ',' varchar10 ',' text ' ,
2025-09-05 14:39:56 +08:00
now ( ) , now ( ) , now ( ) , now ( ) , now ( ) , now ( ) , ' 15 m ' : : interval , null ,
' { "key1" : "value1" } ' : : json , ' { "key2" : "value2" } ' : : jsonb
2025-07-30 20:47:25 +08:00
) ;
`
2025-08-25 19:31:57 +08:00
_ , err = p . Exec ( t . Context ( ) , sql )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
t . Run ( "When doing a table query should map Postgres column types to Go types" , func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT * FROM postgres_types" ,
"format" : "table"
} ` ) ,
RefID : "A" ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Len ( t , frames , 1 )
2025-09-05 14:39:56 +08:00
require . Len ( t , frames [ 0 ] . Fields , 20 )
2025-07-30 20:47:25 +08:00
require . Equal ( t , int16 ( 1 ) , * frames [ 0 ] . Fields [ 0 ] . At ( 0 ) . ( * int16 ) )
require . Equal ( t , int32 ( 2 ) , * frames [ 0 ] . Fields [ 1 ] . At ( 0 ) . ( * int32 ) )
require . Equal ( t , int64 ( 3 ) , * frames [ 0 ] . Fields [ 2 ] . At ( 0 ) . ( * int64 ) )
require . Equal ( t , float64 ( 4.5 ) , * frames [ 0 ] . Fields [ 3 ] . At ( 0 ) . ( * float64 ) )
require . Equal ( t , float64 ( 6.7 ) , * frames [ 0 ] . Fields [ 4 ] . At ( 0 ) . ( * float64 ) )
require . Equal ( t , float64 ( 1.1 ) , * frames [ 0 ] . Fields [ 5 ] . At ( 0 ) . ( * float64 ) )
require . Equal ( t , float64 ( 1.2 ) , * frames [ 0 ] . Fields [ 6 ] . At ( 0 ) . ( * float64 ) )
require . Equal ( t , "char10 " , * frames [ 0 ] . Fields [ 7 ] . At ( 0 ) . ( * string ) )
require . Equal ( t , "varchar10" , * frames [ 0 ] . Fields [ 8 ] . At ( 0 ) . ( * string ) )
require . Equal ( t , "text" , * frames [ 0 ] . Fields [ 9 ] . At ( 0 ) . ( * string ) )
_ , ok := frames [ 0 ] . Fields [ 10 ] . At ( 0 ) . ( * time . Time )
require . True ( t , ok )
_ , ok = frames [ 0 ] . Fields [ 11 ] . At ( 0 ) . ( * time . Time )
require . True ( t , ok )
_ , ok = frames [ 0 ] . Fields [ 12 ] . At ( 0 ) . ( * time . Time )
require . True ( t , ok )
_ , ok = frames [ 0 ] . Fields [ 13 ] . At ( 0 ) . ( * string )
require . True ( t , ok )
_ , ok = frames [ 0 ] . Fields [ 14 ] . At ( 0 ) . ( * string )
require . True ( t , ok )
_ , ok = frames [ 0 ] . Fields [ 15 ] . At ( 0 ) . ( * time . Time )
require . True ( t , ok )
require . Equal ( t , "00:15:00" , * frames [ 0 ] . Fields [ 16 ] . At ( 0 ) . ( * string ) )
require . Nil ( t , frames [ 0 ] . Fields [ 17 ] . At ( 0 ) )
2025-09-05 14:39:56 +08:00
_ , ok = frames [ 0 ] . Fields [ 18 ] . At ( 0 ) . ( * json . RawMessage )
require . True ( t , ok )
require . Equal ( t , json . RawMessage ( ` { "key1": "value1"} ` ) , * frames [ 0 ] . Fields [ 18 ] . At ( 0 ) . ( * json . RawMessage ) )
_ , ok = frames [ 0 ] . Fields [ 19 ] . At ( 0 ) . ( * json . RawMessage )
require . True ( t , ok )
require . Equal ( t , json . RawMessage ( ` { "key2": "value2"} ` ) , * frames [ 0 ] . Fields [ 19 ] . At ( 0 ) . ( * json . RawMessage ) )
2025-07-30 20:47:25 +08:00
} )
} )
t . Run ( "Given a table with metrics that lacks data for some series " , func ( t * testing . T ) {
sql := `
DROP TABLE IF EXISTS metric ;
CREATE TABLE metric (
time timestamp ,
value integer
)
`
2025-08-25 19:31:57 +08:00
_ , err := p . Exec ( t . Context ( ) , sql )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
type metric struct {
Time time . Time
Value int64
}
series := [ ] * metric { }
firstRange := genTimeRangeByInterval ( fromStart , 10 * time . Minute , 10 * time . Second )
secondRange := genTimeRangeByInterval ( fromStart . Add ( 20 * time . Minute ) , 10 * time . Minute , 10 * time . Second )
for _ , t := range firstRange {
series = append ( series , & metric {
Time : t ,
Value : 15 ,
} )
}
for _ , t := range secondRange {
series = append ( series , & metric {
Time : t ,
Value : 20 ,
} )
}
for _ , m := range series {
2025-08-25 19:31:57 +08:00
_ , err := p . Exec ( t . Context ( ) , ` INSERT INTO metric ("time", value) VALUES ($1, $2) ` , m . Time . UTC ( ) , m . Value )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
}
t . Run ( "When doing a metric query using timeGroup" , func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT $__timeGroup(time, '5m') AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1" ,
"format" : "time_series"
} ` ) ,
RefID : "A" ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Len ( t , frames , 1 )
require . Equal ( t , 4 , frames [ 0 ] . Fields [ 0 ] . Len ( ) )
// without fill this should result in 4 buckets
dt := fromStart
for i := 0 ; i < 2 ; i ++ {
aValue := * frames [ 0 ] . Fields [ 1 ] . At ( i ) . ( * float64 )
aTime := * frames [ 0 ] . Fields [ 0 ] . At ( i ) . ( * time . Time )
require . Equal ( t , float64 ( 15 ) , aValue )
require . Equal ( t , dt , aTime )
dt = dt . Add ( 5 * time . Minute )
}
// adjust for 10 minute gap between first and second set of points
dt = dt . Add ( 10 * time . Minute )
for i := 2 ; i < 4 ; i ++ {
aValue := * frames [ 0 ] . Fields [ 1 ] . At ( i ) . ( * float64 )
aTime := * frames [ 0 ] . Fields [ 0 ] . At ( i ) . ( * time . Time )
require . Equal ( t , float64 ( 20 ) , aValue )
require . Equal ( t , dt , aTime )
dt = dt . Add ( 5 * time . Minute )
}
} )
2025-08-25 19:31:57 +08:00
t . Run ( "When doing a query without a format should default to time_series" , func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT $__timeGroup(time, '5m') AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1 "
} ` ) ,
RefID : "A" ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-08-25 19:31:57 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Len ( t , frames , 1 )
require . Len ( t , frames [ 0 ] . Fields , 2 )
} )
2025-07-30 20:47:25 +08:00
t . Run ( "When doing a metric query using timeGroup and $__interval" , func ( t * testing . T ) {
mockInterpolate := sqleng . Interpolate
sqleng . Interpolate = origInterpolate
t . Cleanup ( func ( ) {
sqleng . Interpolate = mockInterpolate
} )
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT $__timeGroup(time, $__interval) AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1" ,
"format" : "time_series"
} ` ) ,
RefID : "A" ,
Interval : time . Second * 60 ,
TimeRange : backend . TimeRange {
From : fromStart ,
To : fromStart . Add ( 30 * time . Minute ) ,
} ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
frames := queryResult . Frames
require . NoError ( t , queryResult . Error )
require . Equal ( t ,
"SELECT floor(extract(epoch from time)/60)*60 AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1" ,
frames [ 0 ] . Meta . ExecutedQueryString )
} )
t . Run ( "When doing a metric query using timeGroup with NULL fill enabled" , func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT $__timeGroup(time, '5m', NULL) AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1" ,
"format" : "time_series"
} ` ) ,
RefID : "A" ,
TimeRange : backend . TimeRange {
From : fromStart ,
To : fromStart . Add ( 34 * time . Minute ) ,
} ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Equal ( t , 1 , len ( frames ) )
require . Equal ( t , 7 , frames [ 0 ] . Fields [ 0 ] . Len ( ) )
dt := fromStart
for i := 0 ; i < 2 ; i ++ {
aValue := * frames [ 0 ] . Fields [ 1 ] . At ( i ) . ( * float64 )
aTime := * frames [ 0 ] . Fields [ 0 ] . At ( i ) . ( * time . Time )
require . Equal ( t , float64 ( 15 ) , aValue )
require . True ( t , aTime . Equal ( dt ) )
dt = dt . Add ( 5 * time . Minute )
}
// check for NULL values inserted by fill
require . Nil ( t , frames [ 0 ] . Fields [ 1 ] . At ( 2 ) )
require . Nil ( t , frames [ 0 ] . Fields [ 1 ] . At ( 3 ) )
// adjust for 10 minute gap between first and second set of points
dt = dt . Add ( 10 * time . Minute )
for i := 4 ; i < 6 ; i ++ {
aValue := * frames [ 0 ] . Fields [ 1 ] . At ( i ) . ( * float64 )
aTime := * frames [ 0 ] . Fields [ 0 ] . At ( i ) . ( * time . Time )
require . Equal ( t , float64 ( 20 ) , aValue )
require . True ( t , aTime . Equal ( dt ) )
dt = dt . Add ( 5 * time . Minute )
}
// check for NULL values inserted by fill
require . Nil ( t , frames [ 0 ] . Fields [ 1 ] . At ( 6 ) )
} )
t . Run ( "When doing a metric query using timeGroup with value fill enabled" , func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT $__timeGroup(time, '5m', 1.5) AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1" ,
"format" : "time_series"
} ` ) ,
RefID : "A" ,
TimeRange : backend . TimeRange {
From : fromStart ,
To : fromStart . Add ( 34 * time . Minute ) ,
} ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Equal ( t , 1 , len ( frames ) )
require . Equal ( t , 1.5 , * frames [ 0 ] . Fields [ 1 ] . At ( 3 ) . ( * float64 ) )
} )
} )
t . Run ( "Given a table with one data point" , func ( t * testing . T ) {
type metric struct {
Time time . Time
Value int64
}
startTime := time . Now ( ) . UTC ( ) . Add ( - time . Minute * 5 )
series := [ ] * metric {
{
Time : startTime ,
Value : 33 ,
} ,
}
for _ , m := range series {
2025-08-25 19:31:57 +08:00
_ , err := p . Exec ( t . Context ( ) , ` INSERT INTO metric ("time", value) VALUES ($1, $2) ` , m . Time . UTC ( ) , m . Value )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
}
t . Run ( "querying with time group with default value" , func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "WITH data AS (SELECT now()-'3m'::interval AS ts, 42 AS n) SELECT $__timeGroup(ts, '1m', 0), n FROM data" ,
"format" : "time_series"
} ` ) ,
RefID : "A" ,
TimeRange : backend . TimeRange {
From : startTime ,
To : startTime . Add ( 5 * time . Minute ) ,
} ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Equal ( t , 1 , len ( frames ) )
require . Equal ( t , "Time" , frames [ 0 ] . Fields [ 0 ] . Name )
require . Equal ( t , "n" , frames [ 0 ] . Fields [ 1 ] . Name )
require . Equal ( t , float64 ( 0 ) , * frames [ 0 ] . Fields [ 1 ] . At ( 0 ) . ( * float64 ) )
require . Equal ( t , float64 ( 0 ) , * frames [ 0 ] . Fields [ 1 ] . At ( 1 ) . ( * float64 ) )
require . Equal ( t , float64 ( 42 ) , * frames [ 0 ] . Fields [ 1 ] . At ( 2 ) . ( * float64 ) )
require . Equal ( t , float64 ( 0 ) , * frames [ 0 ] . Fields [ 1 ] . At ( 3 ) . ( * float64 ) )
require . Equal ( t , float64 ( 0 ) , * frames [ 0 ] . Fields [ 1 ] . At ( 4 ) . ( * float64 ) )
require . Equal ( t , float64 ( 0 ) , * frames [ 0 ] . Fields [ 1 ] . At ( 5 ) . ( * float64 ) )
} )
} )
t . Run ( "When doing a metric query using timeGroup with previous fill enabled" , func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT $__timeGroup(time, '5m', previous), avg(value) as value FROM metric GROUP BY 1 ORDER BY 1" ,
"format" : "time_series"
} ` ) ,
RefID : "A" ,
TimeRange : backend . TimeRange {
From : fromStart ,
To : fromStart . Add ( 34 * time . Minute ) ,
} ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Equal ( t , 1 , len ( frames ) )
require . Equal ( t , float64 ( 15.0 ) , * frames [ 0 ] . Fields [ 1 ] . At ( 2 ) . ( * float64 ) )
require . Equal ( t , float64 ( 15.0 ) , * frames [ 0 ] . Fields [ 1 ] . At ( 3 ) . ( * float64 ) )
require . Equal ( t , float64 ( 20.0 ) , * frames [ 0 ] . Fields [ 1 ] . At ( 6 ) . ( * float64 ) )
} )
t . Run ( "Given a table with metrics having multiple values and measurements" , func ( t * testing . T ) {
type metric_values struct {
Time time . Time
TimeInt64 int64
TimeInt64Nullable * int64
TimeFloat64 float64
TimeFloat64Nullable * float64
TimeInt32 int32
TimeInt32Nullable * int32
TimeFloat32 float32
TimeFloat32Nullable * float32
Measurement string
ValueOne int64
ValueTwo int64
}
2025-08-25 19:31:57 +08:00
_ , err := p . Exec ( t . Context ( ) , "DROP TABLE IF EXISTS metric_values" )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
2025-08-25 19:31:57 +08:00
_ , err = p . Exec ( t . Context ( ) , ` CREATE TABLE metric_values (
2025-07-30 20:47:25 +08:00
"time" TIMESTAMP NULL ,
"timeInt64" BIGINT NOT NULL , "timeInt64Nullable" BIGINT NULL ,
"timeFloat64" DOUBLE PRECISION NOT NULL , "timeFloat64Nullable" DOUBLE PRECISION NULL ,
"timeInt32" INTEGER NOT NULL , "timeInt32Nullable" INTEGER NULL ,
"timeFloat32" DOUBLE PRECISION NOT NULL , "timeFloat32Nullable" DOUBLE PRECISION NULL ,
measurement VARCHAR ( 255 ) NULL ,
"valueOne" INTEGER NULL , "valueTwo" INTEGER NULL
) ` )
require . NoError ( t , err )
rng := rand . New ( rand . NewSource ( time . Now ( ) . Unix ( ) ) )
rnd := func ( min , max int64 ) int64 {
return rng . Int63n ( max - min ) + min
}
var tInitial time . Time
series := [ ] * metric_values { }
for i , t := range genTimeRangeByInterval ( fromStart . Add ( - 30 * time . Minute ) , 90 * time . Minute , 5 * time . Minute ) {
if i == 0 {
tInitial = t
}
tSeconds := t . Unix ( )
tSecondsInt32 := int32 ( tSeconds )
tSecondsFloat32 := float32 ( tSeconds )
tMilliseconds := tSeconds * 1e3
tMillisecondsFloat := float64 ( tMilliseconds )
first := metric_values {
Time : t ,
TimeInt64 : tMilliseconds ,
TimeInt64Nullable : & ( tMilliseconds ) ,
TimeFloat64 : tMillisecondsFloat ,
TimeFloat64Nullable : & tMillisecondsFloat ,
TimeInt32 : tSecondsInt32 ,
TimeInt32Nullable : & tSecondsInt32 ,
TimeFloat32 : tSecondsFloat32 ,
TimeFloat32Nullable : & tSecondsFloat32 ,
Measurement : "Metric A" ,
ValueOne : rnd ( 0 , 100 ) ,
ValueTwo : rnd ( 0 , 100 ) ,
}
second := first
second . Measurement = "Metric B"
second . ValueOne = rnd ( 0 , 100 )
second . ValueTwo = rnd ( 0 , 100 )
series = append ( series , & first )
series = append ( series , & second )
}
// _, err = session.InsertMulti(series)
for _ , m := range series {
2025-08-25 19:31:57 +08:00
_ , err := p . Exec ( t . Context ( ) , ` INSERT INTO "metric_values" (
2025-07-30 20:47:25 +08:00
time ,
"timeInt64" , "timeInt64Nullable" ,
"timeFloat64" , "timeFloat64Nullable" ,
"timeInt32" , "timeInt32Nullable" ,
"timeFloat32" , "timeFloat32Nullable" ,
measurement , "valueOne" , "valueTwo"
) VALUES ( $ 1 , $ 2 , $ 3 , $ 4 , $ 5 , $ 6 , $ 7 , $ 8 , $ 9 , $ 10 , $ 11 , $ 12 ) ` ,
m . Time ,
m . TimeInt64 , m . TimeInt64Nullable ,
m . TimeFloat64 , m . TimeFloat64Nullable ,
m . TimeInt32 , m . TimeInt32Nullable ,
m . TimeFloat32 , m . TimeFloat32Nullable ,
m . Measurement , m . ValueOne , m . ValueTwo ,
)
require . NoError ( t , err )
}
t . Run (
"When doing a metric query using epoch (int64) as time column and value column (int64) should return metric with time in time.Time" ,
func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT \"timeInt64\" as time, \"timeInt64\" FROM metric_values ORDER BY time LIMIT 1" ,
"format" : "time_series"
} ` ) ,
RefID : "A" ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Len ( t , frames , 1 )
require . True ( t , tInitial . Equal ( * frames [ 0 ] . Fields [ 0 ] . At ( 0 ) . ( * time . Time ) ) )
} )
t . Run ( "When doing a metric query using epoch (int64 nullable) as time column and value column (int64 nullable,) should return metric with time in time.Time" ,
func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT \"timeInt64Nullable\" as time, \"timeInt64Nullable\" FROM metric_values ORDER BY time LIMIT 1" ,
"format" : "time_series"
} ` ) ,
RefID : "A" ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Len ( t , frames , 1 )
require . True ( t , tInitial . Equal ( * frames [ 0 ] . Fields [ 0 ] . At ( 0 ) . ( * time . Time ) ) )
} )
t . Run ( "When doing a metric query using epoch (float64) as time column and value column (float64), should return metric with time in time.Time" ,
func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT \"timeFloat64\" as time, \"timeFloat64\" FROM metric_values ORDER BY time LIMIT 1" ,
"format" : "time_series"
} ` ) ,
RefID : "A" ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Len ( t , frames , 1 )
require . True ( t , tInitial . Equal ( * frames [ 0 ] . Fields [ 0 ] . At ( 0 ) . ( * time . Time ) ) )
} )
t . Run ( "When doing a metric query using epoch (float64 nullable) as time column and value column (float64 nullable), should return metric with time in time.Time" ,
func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT \"timeFloat64Nullable\" as time, \"timeFloat64Nullable\" FROM metric_values ORDER BY time LIMIT 1" ,
"format" : "time_series"
} ` ) ,
RefID : "A" ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Equal ( t , 1 , len ( frames ) )
require . True ( t , tInitial . Equal ( * frames [ 0 ] . Fields [ 0 ] . At ( 0 ) . ( * time . Time ) ) )
} )
t . Run ( "When doing a metric query using epoch (int32) as time column and value column (int32), should return metric with time in time.Time" ,
func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT \"timeInt32\" as time, \"timeInt32\" FROM metric_values ORDER BY time LIMIT 1" ,
"format" : "time_series"
} ` ) ,
RefID : "A" ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Equal ( t , 1 , len ( frames ) )
require . True ( t , tInitial . Equal ( * frames [ 0 ] . Fields [ 0 ] . At ( 0 ) . ( * time . Time ) ) )
} )
t . Run ( "When doing a metric query using epoch (int32 nullable) as time column and value column (int32 nullable), should return metric with time in time.Time" ,
func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT \"timeInt32Nullable\" as time, \"timeInt32Nullable\" FROM metric_values ORDER BY time LIMIT 1" ,
"format" : "time_series"
} ` ) ,
RefID : "A" ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Equal ( t , 1 , len ( frames ) )
require . True ( t , tInitial . Equal ( * frames [ 0 ] . Fields [ 0 ] . At ( 0 ) . ( * time . Time ) ) )
} )
t . Run ( "When doing a metric query using epoch (float32) as time column and value column (float32), should return metric with time in time.Time" ,
func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT \"timeFloat32\" as time, \"timeFloat32\" FROM metric_values ORDER BY time LIMIT 1" ,
"format" : "time_series"
} ` ) ,
RefID : "A" ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Equal ( t , 1 , len ( frames ) )
aTime := time . Unix ( 0 , int64 ( float64 ( float32 ( tInitial . Unix ( ) ) ) * 1e3 ) * int64 ( time . Millisecond ) )
require . True ( t , aTime . Equal ( * frames [ 0 ] . Fields [ 0 ] . At ( 0 ) . ( * time . Time ) ) )
} )
t . Run ( "When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable), should return metric with time in time.Time" ,
func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT \"timeFloat32Nullable\" as time, \"timeFloat32Nullable\" FROM metric_values ORDER BY time LIMIT 1" ,
"format" : "time_series"
} ` ) ,
RefID : "A" ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Equal ( t , 1 , len ( frames ) )
aTime := time . Unix ( 0 , int64 ( float64 ( float32 ( tInitial . Unix ( ) ) ) * 1e3 ) * int64 ( time . Millisecond ) )
require . True ( t , aTime . Equal ( * frames [ 0 ] . Fields [ 0 ] . At ( 0 ) . ( * time . Time ) ) )
} )
t . Run ( "When doing a metric query grouping by time and select metric column should return correct series" , func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values ORDER BY 1" ,
"format" : "time_series"
} ` ) ,
RefID : "A" ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Equal ( t , 1 , len ( frames ) )
require . Equal ( t , 3 , len ( frames [ 0 ] . Fields ) )
require . Equal ( t , "Metric A - value one" , frames [ 0 ] . Fields [ 1 ] . Name )
require . Equal ( t , "Metric B - value one" , frames [ 0 ] . Fields [ 2 ] . Name )
} )
t . Run ( "When doing a metric query with metric column and multiple value columns" , func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT $__timeEpoch(time), measurement as metric, \"valueOne\", \"valueTwo\" FROM metric_values ORDER BY 1" ,
"format" : "time_series"
} ` ) ,
RefID : "A" ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . NoError ( t , err )
require . Equal ( t , 1 , len ( frames ) )
require . Equal ( t , 5 , len ( frames [ 0 ] . Fields ) )
require . Equal ( t , "valueOne" , frames [ 0 ] . Fields [ 1 ] . Name )
require . Equal ( t , data . Labels { "metric" : "Metric A" } , frames [ 0 ] . Fields [ 1 ] . Labels )
require . Equal ( t , "valueOne" , frames [ 0 ] . Fields [ 2 ] . Name )
require . Equal ( t , data . Labels { "metric" : "Metric B" } , frames [ 0 ] . Fields [ 2 ] . Labels )
require . Equal ( t , "valueTwo" , frames [ 0 ] . Fields [ 3 ] . Name )
require . Equal ( t , data . Labels { "metric" : "Metric A" } , frames [ 0 ] . Fields [ 3 ] . Labels )
require . Equal ( t , "valueTwo" , frames [ 0 ] . Fields [ 4 ] . Name )
require . Equal ( t , data . Labels { "metric" : "Metric B" } , frames [ 0 ] . Fields [ 4 ] . Labels )
} )
t . Run ( "When doing a metric query grouping by time should return correct series" , func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT $__timeEpoch(time), \"valueOne\", \"valueTwo\" FROM metric_values ORDER BY 1" ,
"format" : "time_series"
} ` ) ,
RefID : "A" ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Equal ( t , 1 , len ( frames ) )
require . Equal ( t , 3 , len ( frames [ 0 ] . Fields ) )
require . Equal ( t , "valueOne" , frames [ 0 ] . Fields [ 1 ] . Name )
require . Equal ( t , "valueTwo" , frames [ 0 ] . Fields [ 2 ] . Name )
} )
t . Run ( "When doing a query with timeFrom,timeTo,unixEpochFrom,unixEpochTo macros" , func ( t * testing . T ) {
fakeInterpolate := sqleng . Interpolate
t . Cleanup ( func ( ) {
sqleng . Interpolate = fakeInterpolate
} )
sqleng . Interpolate = origInterpolate
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1" ,
"format" : "time_series"
} ` ) ,
RefID : "A" ,
TimeRange : backend . TimeRange {
From : fromStart . Add ( - 5 * time . Minute ) ,
To : fromStart ,
} ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Len ( t , frames , 1 )
require . Equal ( t ,
"SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1" ,
frames [ 0 ] . Meta . ExecutedQueryString )
} )
} )
// Annotation-style queries against an event table: time columns in various
// representations (bigint epoch seconds/milliseconds, timestamp, NULL) must
// all come back as nullable time fields, and the row limit must be enforced.
t.Run("Given a table with event data", func(t *testing.T) {
	// Shape of the fixture rows inserted below.
	type event struct {
		TimeSec     int64
		Description string
		Tags        string
	}

	// Recreate the event table from scratch so reruns start clean.
	_, err := p.Exec(t.Context(), "DROP TABLE IF EXISTS event")
	require.NoError(t, err)

	_, err = p.Exec(t.Context(), `CREATE TABLE event (time_sec BIGINT NULL, description VARCHAR(255) NULL, tags VARCHAR(255) NULL)`)
	require.NoError(t, err)

	// Seed one "deploy" and one "ticket" event per 25-minute step over one
	// hour, starting 20 minutes before fromStart. Note: the loop variable is
	// named ts to avoid shadowing the *testing.T parameter t.
	events := []*event{}
	for _, ts := range genTimeRangeByInterval(fromStart.Add(-20*time.Minute), time.Hour, 25*time.Minute) {
		events = append(events, &event{
			TimeSec:     ts.Unix(),
			Description: "Someone deployed something",
			Tags:        "deploy",
		})
		events = append(events, &event{
			TimeSec:     ts.Add(5 * time.Minute).Unix(),
			Description: "New support ticket registered",
			Tags:        "ticket",
		})
	}
	for _, e := range events {
		_, err := p.Exec(t.Context(), "INSERT INTO event (time_sec, description, tags) VALUES ($1, $2, $3)", e.TimeSec, e.Description, e.Tags)
		require.NoError(t, err)
	}

	t.Run("When doing an annotation query of deploy events should return expected result", func(t *testing.T) {
		query := &backend.QueryDataRequest{
			Queries: []backend.DataQuery{
				{
					JSON: []byte(`{
						"rawSql": "SELECT \"time_sec\" as time, description as text, tags FROM event WHERE $__unixEpochFilter(time_sec) AND tags='deploy' ORDER BY 1 ASC",
						"format": "table"
					}`),
					RefID: "Deploys",
					TimeRange: backend.TimeRange{
						From: fromStart.Add(-20 * time.Minute),
						To:   fromStart.Add(40 * time.Minute),
					},
				},
			},
		}

		resp, err := exe.QueryData(t.Context(), query)
		require.NoError(t, err)
		queryResult := resp.Responses["Deploys"]
		require.NoError(t, queryResult.Error)
		frames := queryResult.Frames
		require.Len(t, frames, 1)
		require.Len(t, frames[0].Fields, 3)
	})

	t.Run("When doing an annotation query of ticket events should return expected result", func(t *testing.T) {
		query := &backend.QueryDataRequest{
			Queries: []backend.DataQuery{
				{
					JSON: []byte(`{
						"rawSql": "SELECT \"time_sec\" as time, description as text, tags FROM event WHERE $__unixEpochFilter(time_sec) AND tags='ticket' ORDER BY 1 ASC",
						"format": "table"
					}`),
					RefID: "Tickets",
					TimeRange: backend.TimeRange{
						From: fromStart.Add(-20 * time.Minute),
						To:   fromStart.Add(40 * time.Minute),
					},
				},
			},
		}

		resp, err := exe.QueryData(t.Context(), query)
		require.NoError(t, err)
		queryResult := resp.Responses["Tickets"]
		require.NoError(t, queryResult.Error)
		frames := queryResult.Frames
		require.Equal(t, 1, len(frames))
		require.Equal(t, 3, len(frames[0].Fields))
	})

	t.Run("When doing an annotation query with a time column in datetime format", func(t *testing.T) {
		dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
		dtFormat := "2006-01-02 15:04:05.999999999"
		queryjson := fmt.Sprintf("{\"rawSql\": \"SELECT CAST('%s' AS TIMESTAMP) as time, 'message' as text, 'tag1,tag2' as tags\", \"format\": \"table\" }", dt.Format(dtFormat))
		query := &backend.QueryDataRequest{
			Queries: []backend.DataQuery{
				{
					JSON:  []byte(queryjson),
					RefID: "A",
				},
			},
		}

		resp, err := exe.QueryData(t.Context(), query)
		require.NoError(t, err)
		queryResult := resp.Responses["A"]
		require.NoError(t, queryResult.Error)
		frames := queryResult.Frames
		require.Equal(t, 1, len(frames))
		require.Equal(t, 3, len(frames[0].Fields))
		// Should be in time.Time
		require.Equal(t, dt.Unix(), (*frames[0].Fields[0].At(0).(*time.Time)).Unix())
	})

	t.Run("When doing an annotation query with a time column in epoch second format should return time.Time", func(t *testing.T) {
		dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
		queryjson := fmt.Sprintf("{\"rawSql\": \"SELECT %d as time, 'message' as text, 'tag1,tag2' as tags\", \"format\": \"table\"}", dt.Unix())
		query := &backend.QueryDataRequest{
			Queries: []backend.DataQuery{
				{
					JSON:  []byte(queryjson),
					RefID: "A",
				},
			},
		}

		resp, err := exe.QueryData(t.Context(), query)
		require.NoError(t, err)
		queryResult := resp.Responses["A"]
		require.NoError(t, queryResult.Error)
		frames := queryResult.Frames
		require.Equal(t, 1, len(frames))
		require.Equal(t, 3, len(frames[0].Fields))
		// Should be in time.Time
		require.Equal(t, dt.Unix(), (*frames[0].Fields[0].At(0).(*time.Time)).Unix())
	})

	t.Run("When doing an annotation query with a time column in epoch second format (int) should return time.Time", func(t *testing.T) {
		dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
		queryjson := fmt.Sprintf("{\"rawSql\": \"SELECT cast(%d as bigint) as time, 'message' as text, 'tag1,tag2' as tags\", \"format\": \"table\"}", dt.Unix())
		query := &backend.QueryDataRequest{
			Queries: []backend.DataQuery{
				{
					JSON:  []byte(queryjson),
					RefID: "A",
				},
			},
		}

		resp, err := exe.QueryData(t.Context(), query)
		require.NoError(t, err)
		queryResult := resp.Responses["A"]
		require.NoError(t, queryResult.Error)
		frames := queryResult.Frames
		require.Equal(t, 1, len(frames))
		require.Equal(t, 3, len(frames[0].Fields))
		// Should be in time.Time
		require.Equal(t, dt.Unix(), (*frames[0].Fields[0].At(0).(*time.Time)).Unix())
	})

	t.Run("When doing an annotation query with a time column in epoch millisecond format should return time.Time", func(t *testing.T) {
		dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
		queryjson := fmt.Sprintf("{\"rawSql\":\"SELECT %d as time, 'message' as text, 'tag1,tag2' as tags\", \"format\": \"table\"}", dt.Unix()*1000)
		query := &backend.QueryDataRequest{
			Queries: []backend.DataQuery{
				{
					JSON:  []byte(queryjson),
					RefID: "A",
				},
			},
		}

		resp, err := exe.QueryData(t.Context(), query)
		require.NoError(t, err)
		queryResult := resp.Responses["A"]
		require.NoError(t, queryResult.Error)
		frames := queryResult.Frames
		require.Equal(t, 1, len(frames))
		require.Equal(t, 3, len(frames[0].Fields))
		// Should be in time.Time
		require.Equal(t, dt.Unix(), (*frames[0].Fields[0].At(0).(*time.Time)).Unix())
	})

	t.Run("When doing an annotation query with a time column holding a bigint null value should return nil", func(t *testing.T) {
		query := &backend.QueryDataRequest{
			Queries: []backend.DataQuery{
				{
					JSON: []byte(`{
						"rawSql": "SELECT cast(null as bigint) as time, 'message' as text, 'tag1,tag2' as tags",
						"format": "table"
					}`),
					RefID: "A",
				},
			},
		}

		resp, err := exe.QueryData(t.Context(), query)
		require.NoError(t, err)
		queryResult := resp.Responses["A"]
		require.NoError(t, queryResult.Error)
		frames := queryResult.Frames
		require.Equal(t, 1, len(frames))
		require.Equal(t, 3, len(frames[0].Fields))
		// A NULL epoch must surface as a nil *time.Time.
		require.Nil(t, frames[0].Fields[0].At(0))
	})

	t.Run("When doing an annotation query with a time column holding a timestamp null value should return nil", func(t *testing.T) {
		query := &backend.QueryDataRequest{
			Queries: []backend.DataQuery{
				{
					JSON: []byte(`{
						"rawSql": "SELECT cast(null as timestamp) as time, 'message' as text, 'tag1,tag2' as tags",
						"format": "table"
					}`),
					RefID: "A",
				},
			},
		}

		resp, err := exe.QueryData(t.Context(), query)
		require.NoError(t, err)
		queryResult := resp.Responses["A"]
		require.NoError(t, queryResult.Error)
		frames := queryResult.Frames
		require.Equal(t, 1, len(frames))
		require.Equal(t, 3, len(frames[0].Fields))
		// A NULL timestamp must surface as a nil *time.Time.
		assert.Nil(t, frames[0].Fields[0].At(0))
	})

	t.Run("When doing an annotation query with a time and timeend column should return two fields of type time", func(t *testing.T) {
		query := &backend.QueryDataRequest{
			Queries: []backend.DataQuery{
				{
					JSON: []byte(`{
						"rawSql": "SELECT 1631053772276 as time, 1631054012276 as timeend, '' as text, '' as tags",
						"format": "table"
					}`),
					RefID: "A",
				},
			},
		}

		resp, err := exe.QueryData(t.Context(), query)
		require.NoError(t, err)
		queryResult := resp.Responses["A"]
		require.NoError(t, queryResult.Error)
		frames := queryResult.Frames
		require.Equal(t, 1, len(frames))
		require.Equal(t, 4, len(frames[0].Fields))
		require.Equal(t, data.FieldTypeNullableTime, frames[0].Fields[0].Type())
		require.Equal(t, data.FieldTypeNullableTime, frames[0].Fields[1].Type())
	})

	t.Run("When row limit set to 1", func(t *testing.T) {
		jsonData := sqleng.JsonData{
			MaxOpenConns:        10,
			MaxIdleConns:        2,
			ConnMaxLifetime:     14400,
			Timescaledb:         false,
			Mode:                "disable",
			ConfigurationMethod: "file-path",
		}
		dsInfo := sqleng.DataSourceInfo{
			JsonData:                jsonData,
			DecryptedSecureJSONData: map[string]string{},
		}

		// Build a handler with rowLimit=1 so results get truncated.
		_, handler, err := newPostgresPGX(t.Context(), "error", 1, dsInfo, cnnstr, logger, backend.DataSourceInstanceSettings{})
		require.NoError(t, err)

		t.Run("When doing a table query that returns 2 rows should limit the result to 1 row", func(t *testing.T) {
			query := &backend.QueryDataRequest{
				Queries: []backend.DataQuery{
					{
						JSON: []byte(`{
							"rawSql": "SELECT 1 as value UNION ALL select 2 as value",
							"format": "table"
						}`),
						RefID: "A",
						TimeRange: backend.TimeRange{
							From: time.Now(),
							To:   time.Now(),
						},
					},
				},
			}

			resp, err := handler.QueryData(t.Context(), query)
			require.NoError(t, err)
			queryResult := resp.Responses["A"]
			require.NoError(t, queryResult.Error)
			frames := queryResult.Frames
			require.Equal(t, 1, len(frames))
			require.Equal(t, 1, len(frames[0].Fields))
			require.Equal(t, 1, frames[0].Rows())
			// Truncation must be announced via a warning notice.
			require.Len(t, frames[0].Meta.Notices, 1)
			require.Equal(t, data.NoticeSeverityWarning, frames[0].Meta.Notices[0].Severity)
		})

		t.Run("When doing a time series query that returns 2 rows should limit the result to 1 row", func(t *testing.T) {
			query := &backend.QueryDataRequest{
				Queries: []backend.DataQuery{
					{
						JSON: []byte(`{
							"rawSql": "SELECT 1 as time, 1 as value UNION ALL select 2 as time, 2 as value",
							"format": "time_series"
						}`),
						RefID: "A",
						TimeRange: backend.TimeRange{
							From: time.Now(),
							To:   time.Now(),
						},
					},
				},
			}

			resp, err := handler.QueryData(t.Context(), query)
			require.NoError(t, err)
			queryResult := resp.Responses["A"]
			require.NoError(t, queryResult.Error)
			frames := queryResult.Frames
			require.Equal(t, 1, len(frames))
			require.Equal(t, 2, len(frames[0].Fields))
			require.Equal(t, 1, frames[0].Rows())
			// Truncation must be announced via a warning notice.
			require.Len(t, frames[0].Meta.Notices, 1)
			require.Equal(t, data.NoticeSeverityWarning, frames[0].Meta.Notices[0].Severity)
		})
	})
})
t . Run ( "Given an empty table" , func ( t * testing . T ) {
2025-08-25 19:31:57 +08:00
_ , err := p . Exec ( t . Context ( ) , "DROP TABLE IF EXISTS empty_obj" )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
2025-08-25 19:31:57 +08:00
_ , err = p . Exec ( t . Context ( ) , "CREATE TABLE empty_obj (empty_key VARCHAR(255) NULL, empty_val BIGINT NULL)" )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
t . Run ( "When no rows are returned, should return an empty frame" , func ( t * testing . T ) {
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
JSON : [ ] byte ( ` {
"rawSql" : "SELECT empty_key, empty_val FROM empty_obj" ,
"format" : "table"
} ` ) ,
RefID : "A" ,
TimeRange : backend . TimeRange {
From : time . Now ( ) ,
To : time . Now ( ) . Add ( 1 * time . Minute ) ,
} ,
} ,
} ,
}
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-07-30 20:47:25 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
frames := queryResult . Frames
require . Len ( t , frames , 1 )
require . Equal ( t , 0 , frames [ 0 ] . Rows ( ) )
require . NotNil ( t , frames [ 0 ] . Fields )
require . Empty ( t , frames [ 0 ] . Fields )
} )
2025-09-08 16:37:54 +08:00
t . Run ( "Should handle multiple result sets without panicking" , func ( t * testing . T ) {
// Create a test table for the panic scenario test
sql := `
DROP TABLE IF EXISTS test_multi_results ;
CREATE TABLE test_multi_results (
id integer ,
name text ,
value numeric
) ;
INSERT INTO test_multi_results VALUES
( 1 , ' test1 ' , 10.5 ) ,
( 2 , ' test2 ' , 20.7 ) ,
( 3 , ' test3 ' , 30.2 ) ;
`
_ , err := p . Exec ( t . Context ( ) , sql )
require . NoError ( t , err )
t . Run ( "Should handle compatible multiple result sets" , func ( t * testing . T ) {
// This query returns multiple result sets with the same structure
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
RefID : "A" ,
JSON : [ ] byte ( ` {
"rawSql" : "SELECT id, name FROM test_multi_results WHERE id <= 2; SELECT id, name FROM test_multi_results WHERE id >= 2;" ,
"format" : "table"
} ` ) ,
TimeRange : backend . TimeRange {
From : fromStart ,
To : fromStart . Add ( 1 * time . Hour ) ,
} ,
} ,
} ,
}
// This should not panic and should work correctly
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-09-08 16:37:54 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Len ( t , frames , 1 )
// The frame should be properly constructed from both SELECT results
frame := frames [ 0 ]
require . Equal ( t , 2 , len ( frame . Fields ) ) // id, name from both queries
require . Equal ( t , "id" , frame . Fields [ 0 ] . Name )
require . Equal ( t , "name" , frame . Fields [ 1 ] . Name )
require . Equal ( t , 4 , frame . Rows ( ) ) // 2 rows from first result + 2 rows from second result
} )
t . Run ( "Should return error for incompatible multiple result sets" , func ( t * testing . T ) {
// This query returns multiple result sets with different structures - the kind that used to cause panic
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
RefID : "A" ,
JSON : [ ] byte ( ` {
"rawSql" : "SELECT id, name FROM test_multi_results WHERE id <= 2; SELECT id, value FROM test_multi_results WHERE id >= 2;" ,
"format" : "table"
} ` ) ,
TimeRange : backend . TimeRange {
From : fromStart ,
To : fromStart . Add ( 1 * time . Hour ) ,
} ,
} ,
} ,
}
// This should not panic anymore, but should return an error instead
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-09-08 16:37:54 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
// We expect an error about column mismatch, not a panic
require . Error ( t , queryResult . Error )
require . Contains ( t , queryResult . Error . Error ( ) , "column name mismatch" )
} )
t . Run ( "Should return error for incompatible number of columns" , func ( t * testing . T ) {
// This query returns multiple result sets with different number of columns
// This should fix the error "runtime error: index out of range [1] with length 1"
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
RefID : "A" ,
JSON : [ ] byte ( ` {
"rawSql" : "SELECT id, name FROM test_multi_results WHERE id = 1; SELECT id FROM test_multi_results WHERE id = 1;" ,
"format" : "table"
} ` ) ,
TimeRange : backend . TimeRange {
From : fromStart ,
To : fromStart . Add ( 1 * time . Hour ) ,
} ,
} ,
} ,
}
// This should not panic anymore, but should return an error instead
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-09-08 16:37:54 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
// We expect an error about incompatible result structure, not a panic
require . Error ( t , queryResult . Error )
require . Contains ( t , queryResult . Error . Error ( ) , "incompatible result structure: expected 2 columns, got 1 columns" )
} )
} )
t . Run ( "Should handle queries with mixed statement types" , func ( t * testing . T ) {
// This tests a scenario with UPDATE + SELECT that could cause the original panic
query := & backend . QueryDataRequest {
Queries : [ ] backend . DataQuery {
{
RefID : "A" ,
JSON : [ ] byte ( ` {
"rawSql" : "UPDATE test_multi_results SET name = 'updated' WHERE id = 1; SELECT id, name FROM test_multi_results WHERE id = 1;" ,
"format" : "table"
} ` ) ,
TimeRange : backend . TimeRange {
From : fromStart ,
To : fromStart . Add ( 1 * time . Hour ) ,
} ,
} ,
} ,
}
// This should not panic
2025-10-02 20:12:47 +08:00
resp , err := exe . QueryData ( t . Context ( ) , query )
2025-09-08 16:37:54 +08:00
require . NoError ( t , err )
queryResult := resp . Responses [ "A" ]
require . NoError ( t , queryResult . Error )
frames := queryResult . Frames
require . Len ( t , frames , 1 )
// Should only contain data from the SELECT part
frame := frames [ 0 ]
require . Equal ( t , 2 , len ( frame . Fields ) ) // id, name
require . Equal ( t , 1 , frame . Rows ( ) ) // 1 row
// Verify the update worked
nameField := frame . Fields [ 1 ]
nameValue := nameField . At ( 0 ) . ( * string )
require . Equal ( t , "updated" , * nameValue )
} )
2025-07-30 20:47:25 +08:00
} )
2025-09-26 22:23:40 +08:00
t . Run ( "Test Postgres connection with pgpass file" , func ( t * testing . T ) {
require . NoError ( t , preparePgpassFile ( t ) )
require . FileExists ( t , os . Getenv ( "PGPASSFILE" ) , "Make sure that PGPASSFILE is set and file exists" )
cnnstr := postgresTestDBConnString ( )
require . NotContains ( t , cnnstr , "password=" , "Make sure that password is not in the connection string" )
pgpassPool , _ , err := newPostgresPGX ( t . Context ( ) , "error" , 10000 , dsInfo , cnnstr , logger , backend . DataSourceInstanceSettings { } )
require . NoError ( t , err )
_ , err = pgpassPool . Query ( t . Context ( ) , "SELECT 1" ) // Test connection
require . NoError ( t , err )
} )
2025-07-30 20:47:25 +08:00
}