package expr

import (
	"context"
	"encoding/json"
	"fmt"
	"sort"
	"testing"
	"time"

	"github.com/google/go-cmp/cmp"
	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/data"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/testutil"
	"github.com/stretchr/testify/require"

	"github.com/grafana/grafana/pkg/apimachinery/errutil"
	"github.com/grafana/grafana/pkg/expr/metrics"
	"github.com/grafana/grafana/pkg/infra/tracing"
	"github.com/grafana/grafana/pkg/plugins"
	"github.com/grafana/grafana/pkg/services/datasources"
	datafakes "github.com/grafana/grafana/pkg/services/datasources/fakes"
	"github.com/grafana/grafana/pkg/services/dsquerierclient"
	"github.com/grafana/grafana/pkg/services/featuremgmt"
	"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginconfig"
	"github.com/grafana/grafana/pkg/services/pluginsintegration/plugincontext"
	"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginstore"
	"github.com/grafana/grafana/pkg/services/user"
	"github.com/grafana/grafana/pkg/setting"
)
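
// TestService executes a pipeline with a data source query (refID A) and a math
// expression (refID B) that doubles it, and verifies both responses are returned.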
func TestService(t *testing.T) {
	dsDF := data.NewFrame("test",
		data.NewField("time", nil, []time.Time{time.Unix(1, 0)}),
		data.NewField("value", data.Labels{"test": "label"}, []*float64{fp(2)}),
	)

	resp := map[string]backend.DataResponse{
		"A": {Frames: data.Frames{dsDF}},
	}

	queries := []Query{
		{
			RefID: "A",
			DataSource: &datasources.DataSource{
				OrgID: 1,
				UID:   "test",
				Type:  "test",
			},
			JSON: json.RawMessage(`{ "datasource": { "uid": "1" }, "intervalMs": 1000, "maxDataPoints": 1000 }`),
			TimeRange: AbsoluteTimeRange{
				From: time.Time{},
				To:   time.Time{},
			},
		},
		{
			RefID:      "B",
			DataSource: dataSourceModel(),
			JSON:       json.RawMessage(`{ "datasource": { "uid": "__expr__", "type": "__expr__"}, "type": "math", "expression": "$A * 2" }`),
		},
	}

	s, req := newMockQueryService(resp, queries)

	pl, err := s.BuildPipeline(t.Context(), req)
	require.NoError(t, err)

	res, err := s.ExecutePipeline(context.Background(), time.Now(), pl)
	require.NoError(t, err)

	bDF := data.NewFrame("",
		data.NewField("Time", nil, []time.Time{time.Unix(1, 0)}),
		data.NewField("B", data.Labels{"test": "label"}, []*float64{fp(4)}))
	bDF.RefID = "B"
	bDF.SetMeta(&data.FrameMeta{
		Type:        data.FrameTypeTimeSeriesMulti,
		TypeVersion: data.FrameTypeVersion{0, 1},
	})

	expect := &backend.QueryDataResponse{
		Responses: backend.Responses{
			"A": {
				Frames: []*data.Frame{dsDF},
			},
			"B": {
				Frames: []*data.Frame{bDF},
			},
		},
	}
	// The service doesn't guarantee the order of frames in the response, so sort before comparing.
	trans := cmp.Transformer("Sort", func(in []*data.Frame) []*data.Frame {
		out := append([]*data.Frame(nil), in...) // Copy input to avoid mutating it
		sort.SliceStable(out, func(i, j int) bool {
			return out[i].RefID > out[j].RefID
		})
		return out
	})
	options := append([]cmp.Option{trans}, data.FrameTestCompareOptions()...)
	if diff := cmp.Diff(expect, res, options...); diff != "" {
		t.Errorf("Result mismatch (-want +got):\n%s", diff)
	}
}
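
// TestDSQueryError verifies that when a data source query fails, expressions that
// depend on it return a DependencyError while independent expressions still evaluate.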
func TestDSQueryError(t *testing.T) {
	resp := map[string]backend.DataResponse{
		"A": {Error: fmt.Errorf("womp womp")},
		"B": {Frames: data.Frames{}},
	}

	queries := []Query{
		{
			RefID: "A",
			DataSource: &datasources.DataSource{
				OrgID: 1,
				UID:   "test",
				Type:  "test",
			},
			JSON: json.RawMessage(`{ "datasource": { "uid": "1" }, "intervalMs": 1000, "maxDataPoints": 1000 }`),
			TimeRange: AbsoluteTimeRange{
				From: time.Time{},
				To:   time.Time{},
			},
		},
		{
			RefID:      "B",
			DataSource: dataSourceModel(),
			JSON:       json.RawMessage(`{ "datasource": { "uid": "__expr__", "type": "__expr__"}, "type": "math", "expression": "$A * 2" }`),
		},
		{
			RefID:      "C",
			DataSource: dataSourceModel(),
			JSON:       json.RawMessage(`{ "datasource": { "uid": "__expr__", "type": "__expr__"}, "type": "math", "expression": "42" }`),
		},
	}

	s, req := newMockQueryService(resp, queries)

	pl, err := s.BuildPipeline(t.Context(), req)
	require.NoError(t, err)

	res, err := s.ExecutePipeline(context.Background(), time.Now(), pl)
	require.NoError(t, err)

	var utilErr errutil.Error
	require.ErrorContains(t, res.Responses["A"].Error, "womp womp")
	require.ErrorAs(t, res.Responses["B"].Error, &utilErr)
	require.ErrorIs(t, utilErr, DependencyError)

	require.Equal(t, fp(42), res.Responses["C"].Frames[0].Fields[0].At(0))
}
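
// TestParseError verifies that an invalid math expression fails at pipeline build
// time with an error that names the parse failure and the offending expression.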
func TestParseError(t *testing.T) {
	resp := map[string]backend.DataResponse{}
	queries := []Query{
		{
			RefID:      "A",
			DataSource: dataSourceModel(),
			JSON:       json.RawMessage(`{ "datasource": { "uid": "__expr__", "type": "__expr__"}, "type": "math", "expression": "asdf" }`),
		},
	}
	s, req := newMockQueryService(resp, queries)
	_, err := s.BuildPipeline(t.Context(), req)
	require.ErrorContains(t, err, "parse")
	require.ErrorContains(t, err, "math")
	require.ErrorContains(t, err, "asdf")
}
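
// TestSQLExpressionCellLimitFromConfig verifies that SQLExpressionCellLimit from
// the config is passed through to the SQL command's inputLimit.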
func TestSQLExpressionCellLimitFromConfig(t *testing.T) {
	tests := []struct {
		name            string
		configCellLimit int64
		expectedLimit   int64
	}{
		{
			name:            "should pass default cell limit (0) to SQL command",
			configCellLimit: 0,
			expectedLimit:   0,
		},
		{
			name:            "should pass custom cell limit to SQL command",
			configCellLimit: 5000,
			expectedLimit:   5000,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Create a request with an SQL expression
			sqlQuery := Query{
				RefID:      "A",
				DataSource: dataSourceModel(),
				JSON:       json.RawMessage(`{ "datasource": { "uid": "__expr__", "type": "__expr__"}, "type": "sql", "expression": "SELECT 1 AS n" }`),
				TimeRange: AbsoluteTimeRange{
					From: time.Time{},
					To:   time.Time{},
				},
			}

			queries := []Query{sqlQuery}

			// Create a config with the specified cell limit
			cfg := setting.NewCfg()
			cfg.ExpressionsEnabled = true
			cfg.SQLExpressionCellLimit = tt.configCellLimit

			features := featuremgmt.WithFeatures(featuremgmt.FlagSqlExpressions)

			// Create service with our configured limit
			s := &Service{
				cfg:      cfg,
				features: features,
				converter: &ResultConverter{
					Features: features,
				},
				tracer: &testTracer{},
			}

			req := &Request{Queries: queries, User: &user.SignedInUser{}}

			// Build the pipeline
			pipeline, err := s.BuildPipeline(t.Context(), req)
			require.NoError(t, err)

			node := pipeline[0]
			cmdNode := node.(*CMDNode)
			sqlCmd := cmdNode.Command.(*SQLCommand)

			// Verify the SQL command has the correct inputLimit
			require.Equal(t, tt.expectedLimit, sqlCmd.inputLimit, "SQL command has incorrect cell limit")
		})
	}
}
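
// fp returns a pointer to the given float64, for building nullable frame fields.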
func fp(f float64) *float64 {
	return &f
}
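
// mockEndpoint is a fake data source backend that returns canned responses keyed by refID.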
type mockEndpoint struct {
	Responses map[string]backend.DataResponse
}

func (me *mockEndpoint) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	resp := backend.NewQueryDataResponse()
	for _, ref := range req.Queries {
		resp.Responses[ref.RefID] = me.Responses[ref.RefID]
	}
	return resp, nil
}
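
// dataSourceModel returns the expression data source model used for command (expression) nodes.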
func dataSourceModel() *datasources.DataSource {
	d, _ := DataSourceModelFromNodeType(TypeCMDNode)
	return d
}
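
// newMockQueryService builds a Service backed by a mockEndpoint, along with a
// Request containing the given queries.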
func newMockQueryService(responses map[string]backend.DataResponse, queries []Query) (*Service, *Request) {
	me := &mockEndpoint{
		Responses: responses,
	}
	pCtxProvider := plugincontext.ProvideService(setting.NewCfg(), nil, &pluginstore.FakePluginStore{
		PluginList: []pluginstore.Plugin{
			{JSONData: plugins.JSONData{ID: "test"}},
		},
	}, &datafakes.FakeCacheService{}, &datafakes.FakeDataSourceService{}, nil, pluginconfig.NewFakePluginRequestConfigProvider())

	features := featuremgmt.WithFeatures()
	return &Service{
		cfg:          setting.NewCfg(),
		dataService:  me,
		pCtxProvider: pCtxProvider,
		features:     featuremgmt.WithFeatures(),
		tracer:       tracing.InitializeTracerForTest(),
		metrics:      metrics.NewSSEMetrics(nil),
		converter: &ResultConverter{
			Features: features,
			Tracer:   tracing.InitializeTracerForTest(),
		},
		qsDatasourceClientBuilder: dsquerierclient.NewNullQSDatasourceClientBuilder(),
	}, &Request{Queries: queries, User: &user.SignedInUser{}}
}
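
// newMockQueryServiceWithMetricsRegistry is like newMockQueryService, but binds
// the SSE metrics to the provided Prometheus registry.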
func newMockQueryServiceWithMetricsRegistry(
	responses map[string]backend.DataResponse,
	queries []Query,
	reg *prometheus.Registry,
) (*Service, *Request) {
	s, req := newMockQueryService(responses, queries)
	// Replace the default metrics with a set bound to our private registry.
	s.metrics = metrics.NewSSEMetrics(reg)
	return s, req
}

// counterVal returns the value of a Prometheus counter with the given labels so
// tests can check whether it was incremented. If the label combination does not
// exist, 0 is still returned.
func counterVal(t *testing.T, cv *prometheus.CounterVec, labels ...string) float64 {
	t.Helper()
	ch, err := cv.GetMetricWithLabelValues(labels...)
	require.NoError(t, err)
	return testutil.ToFloat64(ch)
}