AzureMonitor: Move Application Insights and Insight Analytics to a deprecated package (#45834)

This commit is contained in:
Andres Martinez Gotor 2022-03-02 06:41:07 -08:00 committed by GitHub
parent 3427ae463d
commit 700f6863f2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
33 changed files with 523 additions and 387 deletions

View File

@ -0,0 +1,23 @@
package azlog
import "github.com/grafana/grafana/pkg/infra/log"
var (
azlog = log.New("tsdb.azuremonitor")
)
func Warn(msg string, args ...interface{}) {
azlog.Warn(msg, args)
}
func Debug(msg string, args ...interface{}) {
azlog.Debug(msg, args)
}
func Error(msg string, args ...interface{}) {
azlog.Error(msg, args)
}
func Info(msg string, args ...interface{}) {
azlog.Info(msg, args)
}

View File

@ -8,6 +8,9 @@ import (
"strings" "strings"
"github.com/grafana/grafana-plugin-sdk-go/backend/resource/httpadapter" "github.com/grafana/grafana-plugin-sdk-go/backend/resource/httpadapter"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
) )
func getTarget(original string) (target string, err error) { func getTarget(original string) (target string, err error) {
@ -63,16 +66,16 @@ func (s *httpServiceProxy) Do(rw http.ResponseWriter, req *http.Request, cli *ht
return rw return rw
} }
func (s *Service) getDataSourceFromHTTPReq(req *http.Request) (datasourceInfo, error) { func (s *Service) getDataSourceFromHTTPReq(req *http.Request) (types.DatasourceInfo, error) {
ctx := req.Context() ctx := req.Context()
pluginContext := httpadapter.PluginConfigFromContext(ctx) pluginContext := httpadapter.PluginConfigFromContext(ctx)
i, err := s.im.Get(pluginContext) i, err := s.im.Get(pluginContext)
if err != nil { if err != nil {
return datasourceInfo{}, nil return types.DatasourceInfo{}, nil
} }
ds, ok := i.(datasourceInfo) ds, ok := i.(types.DatasourceInfo)
if !ok { if !ok {
return datasourceInfo{}, fmt.Errorf("unable to convert datasource from service instance") return types.DatasourceInfo{}, fmt.Errorf("unable to convert datasource from service instance")
} }
return ds, nil return ds, nil
} }
@ -111,7 +114,7 @@ func (s *Service) handleResourceReq(subDataSource string) func(rw http.ResponseW
req.URL.Host = serviceURL.Host req.URL.Host = serviceURL.Host
req.URL.Scheme = serviceURL.Scheme req.URL.Scheme = serviceURL.Scheme
s.executors[subDataSource].resourceRequest(rw, req, service.HTTPClient) s.executors[subDataSource].ResourceRequest(rw, req, service.HTTPClient)
} }
} }
@ -120,8 +123,9 @@ func (s *Service) handleResourceReq(subDataSource string) func(rw http.ResponseW
func (s *Service) newResourceMux() *http.ServeMux { func (s *Service) newResourceMux() *http.ServeMux {
mux := http.NewServeMux() mux := http.NewServeMux()
mux.HandleFunc("/azuremonitor/", s.handleResourceReq(azureMonitor)) mux.HandleFunc("/azuremonitor/", s.handleResourceReq(azureMonitor))
mux.HandleFunc("/appinsights/", s.handleResourceReq(appInsights))
mux.HandleFunc("/loganalytics/", s.handleResourceReq(azureLogAnalytics)) mux.HandleFunc("/loganalytics/", s.handleResourceReq(azureLogAnalytics))
mux.HandleFunc("/resourcegraph/", s.handleResourceReq(azureResourceGraph)) mux.HandleFunc("/resourcegraph/", s.handleResourceReq(azureResourceGraph))
// Remove with Grafana 9
mux.HandleFunc("/appinsights/", s.handleResourceReq(deprecated.AppInsights))
return mux return mux
} }

View File

@ -7,6 +7,8 @@ import (
"testing" "testing"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/metrics"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -95,7 +97,7 @@ func Test_handleResourceReq(t *testing.T) {
proxy := &fakeProxy{} proxy := &fakeProxy{}
s := Service{ s := Service{
im: &fakeInstance{ im: &fakeInstance{
services: map[string]datasourceService{ services: map[string]types.DatasourceService{
azureMonitor: { azureMonitor: {
URL: routes[setting.AzurePublic][azureMonitor].URL, URL: routes[setting.AzurePublic][azureMonitor].URL,
HTTPClient: &http.Client{}, HTTPClient: &http.Client{},
@ -103,8 +105,8 @@ func Test_handleResourceReq(t *testing.T) {
}, },
}, },
executors: map[string]azDatasourceExecutor{ executors: map[string]azDatasourceExecutor{
azureMonitor: &AzureMonitorDatasource{ azureMonitor: &metrics.AzureMonitorDatasource{
proxy: proxy, Proxy: proxy,
}, },
}, },
} }

View File

@ -5,39 +5,38 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"net/http" "net/http"
"regexp"
"github.com/Masterminds/semver"
"github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource" "github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient" "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt" "github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/grafana/grafana-plugin-sdk-go/backend/resource/httpadapter" "github.com/grafana/grafana-plugin-sdk-go/backend/resource/httpadapter"
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing" "github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azcredentials" "github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
) "github.com/grafana/grafana/pkg/tsdb/azuremonitor/loganalytics"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/metrics"
const ( "github.com/grafana/grafana/pkg/tsdb/azuremonitor/resourcegraph"
timeSeries = "time_series" "github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
var (
azlog = log.New("tsdb.azuremonitor")
legendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
) )
func ProvideService(cfg *setting.Cfg, httpClientProvider *httpclient.Provider, tracer tracing.Tracer) *Service { func ProvideService(cfg *setting.Cfg, httpClientProvider *httpclient.Provider, tracer tracing.Tracer) *Service {
proxy := &httpServiceProxy{} proxy := &httpServiceProxy{}
executors := map[string]azDatasourceExecutor{ executors := map[string]azDatasourceExecutor{
azureMonitor: &AzureMonitorDatasource{proxy: proxy}, azureMonitor: &metrics.AzureMonitorDatasource{Proxy: proxy},
appInsights: &ApplicationInsightsDatasource{proxy: proxy}, azureLogAnalytics: &loganalytics.AzureLogAnalyticsDatasource{Proxy: proxy},
azureLogAnalytics: &AzureLogAnalyticsDatasource{proxy: proxy}, azureResourceGraph: &resourcegraph.AzureResourceGraphDatasource{Proxy: proxy},
insightsAnalytics: &InsightsAnalyticsDatasource{proxy: proxy},
azureResourceGraph: &AzureResourceGraphDatasource{proxy: proxy},
} }
// Insights Analytics and Application Insights were deprecated in Grafana 8.x and
// will be finally removed with Grafana 9
if setting.BuildVersion != "" && semver.MustParse(setting.BuildVersion).Compare(semver.MustParse("9.0.0-beta1")) < 0 {
executors[deprecated.InsightsAnalytics] = &deprecated.InsightsAnalyticsDatasource{Proxy: proxy}
executors[deprecated.AppInsights] = &deprecated.ApplicationInsightsDatasource{Proxy: proxy}
}
im := datasource.NewInstanceManager(NewInstanceSettings(cfg, *httpClientProvider, executors)) im := datasource.NewInstanceManager(NewInstanceSettings(cfg, *httpClientProvider, executors))
s := &Service{ s := &Service{
@ -60,10 +59,6 @@ func (s *Service) CallResource(ctx context.Context, req *backend.CallResourceReq
return s.resourceHandler.CallResource(ctx, req, sender) return s.resourceHandler.CallResource(ctx, req, sender)
} }
type serviceProxy interface {
Do(rw http.ResponseWriter, req *http.Request, cli *http.Client) http.ResponseWriter
}
type Service struct { type Service struct {
im instancemgmt.InstanceManager im instancemgmt.InstanceManager
executors map[string]azDatasourceExecutor executors map[string]azDatasourceExecutor
@ -73,37 +68,13 @@ type Service struct {
tracer tracing.Tracer tracer tracing.Tracer
} }
type azureMonitorSettings struct { func getDatasourceService(cfg *setting.Cfg, clientProvider httpclient.Provider, dsInfo types.DatasourceInfo, routeName string) (types.DatasourceService, error) {
SubscriptionId string `json:"subscriptionId"`
LogAnalyticsDefaultWorkspace string `json:"logAnalyticsDefaultWorkspace"`
AppInsightsAppId string `json:"appInsightsAppId"`
}
type datasourceInfo struct {
Cloud string
Credentials azcredentials.AzureCredentials
Settings azureMonitorSettings
Routes map[string]azRoute
Services map[string]datasourceService
JSONData map[string]interface{}
DecryptedSecureJSONData map[string]string
DatasourceID int64
OrgID int64
}
type datasourceService struct {
URL string
HTTPClient *http.Client
}
func getDatasourceService(cfg *setting.Cfg, clientProvider httpclient.Provider, dsInfo datasourceInfo, routeName string) (datasourceService, error) {
route := dsInfo.Routes[routeName] route := dsInfo.Routes[routeName]
client, err := newHTTPClient(route, dsInfo, cfg, clientProvider) client, err := newHTTPClient(route, dsInfo, cfg, clientProvider)
if err != nil { if err != nil {
return datasourceService{}, err return types.DatasourceService{}, err
} }
return datasourceService{ return types.DatasourceService{
URL: dsInfo.Routes[routeName].URL, URL: dsInfo.Routes[routeName].URL,
HTTPClient: client, HTTPClient: client,
}, nil }, nil
@ -122,7 +93,7 @@ func NewInstanceSettings(cfg *setting.Cfg, clientProvider httpclient.Provider, e
return nil, fmt.Errorf("error reading settings: %w", err) return nil, fmt.Errorf("error reading settings: %w", err)
} }
azMonitorSettings := azureMonitorSettings{} azMonitorSettings := types.AzureMonitorSettings{}
err = json.Unmarshal(settings.JSONData, &azMonitorSettings) err = json.Unmarshal(settings.JSONData, &azMonitorSettings)
if err != nil { if err != nil {
return nil, fmt.Errorf("error reading settings: %w", err) return nil, fmt.Errorf("error reading settings: %w", err)
@ -138,7 +109,7 @@ func NewInstanceSettings(cfg *setting.Cfg, clientProvider httpclient.Provider, e
return nil, fmt.Errorf("error getting credentials: %w", err) return nil, fmt.Errorf("error getting credentials: %w", err)
} }
model := datasourceInfo{ model := types.DatasourceInfo{
Cloud: cloud, Cloud: cloud,
Credentials: credentials, Credentials: credentials,
Settings: azMonitorSettings, Settings: azMonitorSettings,
@ -146,7 +117,7 @@ func NewInstanceSettings(cfg *setting.Cfg, clientProvider httpclient.Provider, e
DecryptedSecureJSONData: settings.DecryptedSecureJSONData, DecryptedSecureJSONData: settings.DecryptedSecureJSONData,
DatasourceID: settings.ID, DatasourceID: settings.ID,
Routes: routes[cloud], Routes: routes[cloud],
Services: map[string]datasourceService{}, Services: map[string]types.DatasourceService{},
} }
for routeName := range executors { for routeName := range executors {
@ -162,18 +133,18 @@ func NewInstanceSettings(cfg *setting.Cfg, clientProvider httpclient.Provider, e
} }
type azDatasourceExecutor interface { type azDatasourceExecutor interface {
executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) ExecuteTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error)
resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client)
} }
func (s *Service) getDataSourceFromPluginReq(req *backend.QueryDataRequest) (datasourceInfo, error) { func (s *Service) getDataSourceFromPluginReq(req *backend.QueryDataRequest) (types.DatasourceInfo, error) {
i, err := s.im.Get(req.PluginContext) i, err := s.im.Get(req.PluginContext)
if err != nil { if err != nil {
return datasourceInfo{}, err return types.DatasourceInfo{}, err
} }
dsInfo, ok := i.(datasourceInfo) dsInfo, ok := i.(types.DatasourceInfo)
if !ok { if !ok {
return datasourceInfo{}, fmt.Errorf("unable to convert datasource from service instance") return types.DatasourceInfo{}, fmt.Errorf("unable to convert datasource from service instance")
} }
dsInfo.OrgID = req.PluginContext.OrgID dsInfo.OrgID = req.PluginContext.OrgID
return dsInfo, nil return dsInfo, nil
@ -194,7 +165,7 @@ func (s *Service) newQueryMux() *datasource.QueryTypeMux {
if !ok { if !ok {
return nil, fmt.Errorf("missing service for %s", dst) return nil, fmt.Errorf("missing service for %s", dst)
} }
return executor.executeTimeSeriesQuery(ctx, req.Queries, dsInfo, service.HTTPClient, service.URL, s.tracer) return executor.ExecuteTimeSeriesQuery(ctx, req.Queries, dsInfo, service.HTTPClient, service.URL, s.tracer)
}) })
} }
return mux return mux

View File

@ -12,14 +12,45 @@ import (
"github.com/grafana/grafana/pkg/infra/tracing" "github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azcredentials" "github.com/grafana/grafana/pkg/tsdb/azuremonitor/azcredentials"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func TestProvideService(t *testing.T) {
t.Run("it should skip insight analytics and app insights with Grafana 9", func(t *testing.T) {
currentV := setting.BuildVersion
t.Cleanup(func() {
setting.BuildVersion = currentV
})
versions := []struct {
version string
shouldIncludeInsights bool
}{
{"8.5.0", true},
{"9.0.0-beta1", false},
{"9.0.0", false},
}
for _, v := range versions {
setting.BuildVersion = v.version
s := ProvideService(setting.NewCfg(), httpclient.NewProvider(), nil)
if v.shouldIncludeInsights {
assert.NotNil(t, s.executors[deprecated.InsightsAnalytics])
assert.NotNil(t, s.executors[deprecated.AppInsights])
} else {
assert.Nil(t, s.executors[deprecated.InsightsAnalytics])
assert.Nil(t, s.executors[deprecated.AppInsights])
}
}
})
}
func TestNewInstanceSettings(t *testing.T) { func TestNewInstanceSettings(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
settings backend.DataSourceInstanceSettings settings backend.DataSourceInstanceSettings
expectedModel datasourceInfo expectedModel types.DatasourceInfo
Err require.ErrorAssertionFunc Err require.ErrorAssertionFunc
}{ }{
{ {
@ -29,15 +60,15 @@ func TestNewInstanceSettings(t *testing.T) {
DecryptedSecureJSONData: map[string]string{"key": "value"}, DecryptedSecureJSONData: map[string]string{"key": "value"},
ID: 40, ID: 40,
}, },
expectedModel: datasourceInfo{ expectedModel: types.DatasourceInfo{
Cloud: setting.AzurePublic, Cloud: setting.AzurePublic,
Credentials: &azcredentials.AzureManagedIdentityCredentials{}, Credentials: &azcredentials.AzureManagedIdentityCredentials{},
Settings: azureMonitorSettings{}, Settings: types.AzureMonitorSettings{},
Routes: routes[setting.AzurePublic], Routes: routes[setting.AzurePublic],
JSONData: map[string]interface{}{"azureAuthType": "msi"}, JSONData: map[string]interface{}{"azureAuthType": "msi"},
DatasourceID: 40, DatasourceID: 40,
DecryptedSecureJSONData: map[string]string{"key": "value"}, DecryptedSecureJSONData: map[string]string{"key": "value"},
Services: map[string]datasourceService{}, Services: map[string]types.DatasourceService{},
}, },
Err: require.NoError, Err: require.NoError,
}, },
@ -62,12 +93,12 @@ func TestNewInstanceSettings(t *testing.T) {
} }
type fakeInstance struct { type fakeInstance struct {
routes map[string]azRoute routes map[string]types.AzRoute
services map[string]datasourceService services map[string]types.DatasourceService
} }
func (f *fakeInstance) Get(pluginContext backend.PluginContext) (instancemgmt.Instance, error) { func (f *fakeInstance) Get(pluginContext backend.PluginContext) (instancemgmt.Instance, error) {
return datasourceInfo{ return types.DatasourceInfo{
Routes: f.routes, Routes: f.routes,
Services: f.services, Services: f.services,
}, nil }, nil
@ -83,10 +114,10 @@ type fakeExecutor struct {
expectedURL string expectedURL string
} }
func (f *fakeExecutor) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) { func (f *fakeExecutor) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
} }
func (f *fakeExecutor) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client, func (f *fakeExecutor) ExecuteTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) { url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
if client == nil { if client == nil {
f.t.Errorf("The HTTP client for %s is missing", f.queryType) f.t.Errorf("The HTTP client for %s is missing", f.queryType)
@ -124,7 +155,7 @@ func Test_newMux(t *testing.T) {
s := &Service{ s := &Service{
im: &fakeInstance{ im: &fakeInstance{
routes: routes[azureMonitorPublic], routes: routes[azureMonitorPublic],
services: map[string]datasourceService{ services: map[string]types.DatasourceService{
tt.queryType: { tt.queryType: {
URL: routes[azureMonitorPublic][tt.queryType].URL, URL: routes[azureMonitorPublic][tt.queryType].URL,
HTTPClient: &http.Client{}, HTTPClient: &http.Client{},

View File

@ -1,4 +1,4 @@
package azuremonitor package deprecated
import ( import (
"context" "context"
@ -15,6 +15,9 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/infra/tracing" "github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/grafana/grafana/pkg/util/errutil" "github.com/grafana/grafana/pkg/util/errutil"
"go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/attribute"
"golang.org/x/net/context/ctxhttp" "golang.org/x/net/context/ctxhttp"
@ -22,7 +25,7 @@ import (
// ApplicationInsightsDatasource calls the application insights query API. // ApplicationInsightsDatasource calls the application insights query API.
type ApplicationInsightsDatasource struct { type ApplicationInsightsDatasource struct {
proxy serviceProxy Proxy types.ServiceProxy
} }
// ApplicationInsightsQuery is the model that holds the information // ApplicationInsightsQuery is the model that holds the information
@ -44,12 +47,12 @@ type ApplicationInsightsQuery struct {
aggregation string aggregation string
} }
func (e *ApplicationInsightsDatasource) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) { func (e *ApplicationInsightsDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.proxy.Do(rw, req, cli) e.Proxy.Do(rw, req, cli)
} }
func (e *ApplicationInsightsDatasource) executeTimeSeriesQuery(ctx context.Context, func (e *ApplicationInsightsDatasource) ExecuteTimeSeriesQuery(ctx context.Context,
originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) { url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse() result := backend.NewQueryDataResponse()
@ -93,7 +96,7 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []backend.DataQuery
// Previous versions of the query model don't specify a time grain, so we // Previous versions of the query model don't specify a time grain, so we
// need to fallback to a default value // need to fallback to a default value
if timeGrain == "auto" || timeGrain == "" { if timeGrain == "auto" || timeGrain == "" {
timeGrain, err = setAutoTimeGrain(query.Interval.Milliseconds(), timeGrains) timeGrain, err = azTime.SetAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -130,7 +133,7 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []backend.DataQuery
return applicationInsightsQueries, nil return applicationInsightsQueries, nil
} }
func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query *ApplicationInsightsQuery, dsInfo datasourceInfo, client *http.Client, url string, tracer tracing.Tracer) ( func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query *ApplicationInsightsQuery, dsInfo types.DatasourceInfo, client *http.Client, url string, tracer tracing.Tracer) (
backend.DataResponse, error) { backend.DataResponse, error) {
dataResponse := backend.DataResponse{} dataResponse := backend.DataResponse{}
@ -194,7 +197,7 @@ func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query
return dataResponse, nil return dataResponse, nil
} }
func (e *ApplicationInsightsDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo, url string) (*http.Request, error) { func (e *ApplicationInsightsDatasource) createRequest(ctx context.Context, dsInfo types.DatasourceInfo, url string) (*http.Request, error) {
appInsightsAppID := dsInfo.Settings.AppInsightsAppId appInsightsAppID := dsInfo.Settings.AppInsightsAppId
req, err := http.NewRequest(http.MethodGet, url, nil) req, err := http.NewRequest(http.MethodGet, url, nil)
@ -221,7 +224,7 @@ func formatApplicationInsightsLegendKey(alias string, metricName string, labels
} }
keys = sort.StringSlice(keys) keys = sort.StringSlice(keys)
result := legendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte { result := types.LegendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
metaPartName := strings.Replace(string(in), "{{", "", 1) metaPartName := strings.Replace(string(in), "{{", "", 1)
metaPartName = strings.Replace(metaPartName, "}}", "", 1) metaPartName = strings.Replace(metaPartName, "}}", "", 1)
metaPartName = strings.ToLower(strings.TrimSpace(metaPartName)) metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))

View File

@ -1,4 +1,4 @@
package azuremonitor package deprecated
import ( import (
"context" "context"
@ -7,6 +7,7 @@ import (
"time" "time"
"github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -204,8 +205,8 @@ func TestInsightsDimensionsUnmarshalJSON(t *testing.T) {
func TestAppInsightsCreateRequest(t *testing.T) { func TestAppInsightsCreateRequest(t *testing.T) {
ctx := context.Background() ctx := context.Background()
url := "http://ds" url := "http://ds"
dsInfo := datasourceInfo{ dsInfo := types.DatasourceInfo{
Settings: azureMonitorSettings{AppInsightsAppId: "foo"}, Settings: types.AzureMonitorSettings{AppInsightsAppId: "foo"},
DecryptedSecureJSONData: map[string]string{ DecryptedSecureJSONData: map[string]string{
"appInsightsApiKey": "key", "appInsightsApiKey": "key",
}, },

View File

@ -1,4 +1,4 @@
package azuremonitor package deprecated
import ( import (
"encoding/json" "encoding/json"

View File

@ -1,4 +1,4 @@
package azuremonitor package deprecated
import ( import (
"encoding/json" "encoding/json"
@ -173,7 +173,7 @@ func TestInsightsMetricsResultToFrame(t *testing.T) {
func loadInsightsMetricsResponse(t *testing.T, name string) MetricsResult { func loadInsightsMetricsResponse(t *testing.T, name string) MetricsResult {
t.Helper() t.Helper()
path := filepath.Join("testdata", name) path := filepath.Join("../testdata", name)
// Ignore gosec warning G304 since it's a test // Ignore gosec warning G304 since it's a test
// nolint:gosec // nolint:gosec
f, err := os.Open(path) f, err := os.Open(path)

View File

@ -0,0 +1,20 @@
package deprecated

import (
	"net/http"

	"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
)

// GetAppInsightsMiddleware returns an HTTP client middleware that injects the
// configured Application Insights API key as an "X-API-Key" header on every
// request to one of the (deprecated) Application Insights endpoints. It
// returns nil when no key is configured or the URL is not an App Insights
// endpoint, in which case no middleware should be installed.
func GetAppInsightsMiddleware(url, appInsightsApiKey string) httpclient.Middleware {
	// Parentheses are required here: `&&` binds tighter than `||` in Go, so
	// without them the condition would match the China endpoint even when
	// appInsightsApiKey is empty, injecting a bogus empty X-API-Key header.
	if appInsightsApiKey != "" && (url == AzAppInsights.URL || url == AzChinaAppInsights.URL) {
		// Inject API-Key for AppInsights
		return httpclient.MiddlewareFunc(func(opts httpclient.Options, next http.RoundTripper) http.RoundTripper {
			return httpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
				req.Header.Set("X-API-Key", appInsightsApiKey)
				return next.RoundTrip(req)
			})
		})
	}

	return nil
}

View File

@ -1,4 +1,4 @@
package azuremonitor package deprecated
import ( import (
"bytes" "bytes"
@ -13,13 +13,17 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/infra/tracing" "github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/loganalytics"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/macros"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/grafana/grafana/pkg/util/errutil" "github.com/grafana/grafana/pkg/util/errutil"
"go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/attribute"
"golang.org/x/net/context/ctxhttp" "golang.org/x/net/context/ctxhttp"
) )
type InsightsAnalyticsDatasource struct { type InsightsAnalyticsDatasource struct {
proxy serviceProxy Proxy types.ServiceProxy
} }
type InsightsAnalyticsQuery struct { type InsightsAnalyticsQuery struct {
@ -34,12 +38,12 @@ type InsightsAnalyticsQuery struct {
Target string Target string
} }
func (e *InsightsAnalyticsDatasource) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) { func (e *InsightsAnalyticsDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.proxy.Do(rw, req, cli) e.Proxy.Do(rw, req, cli)
} }
func (e *InsightsAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context, func (e *InsightsAnalyticsDatasource) ExecuteTimeSeriesQuery(ctx context.Context,
originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) { url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse() result := backend.NewQueryDataResponse()
@ -55,7 +59,7 @@ func (e *InsightsAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context
return result, nil return result, nil
} }
func (e *InsightsAnalyticsDatasource) buildQueries(queries []backend.DataQuery, dsInfo datasourceInfo) ([]*InsightsAnalyticsQuery, error) { func (e *InsightsAnalyticsDatasource) buildQueries(queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*InsightsAnalyticsQuery, error) {
iaQueries := []*InsightsAnalyticsQuery{} iaQueries := []*InsightsAnalyticsQuery{}
for _, query := range queries { for _, query := range queries {
@ -74,7 +78,7 @@ func (e *InsightsAnalyticsDatasource) buildQueries(queries []backend.DataQuery,
return nil, fmt.Errorf("query is missing query string property") return nil, fmt.Errorf("query is missing query string property")
} }
qm.InterpolatedQuery, err = KqlInterpolate(query, dsInfo, qm.RawQuery) qm.InterpolatedQuery, err = macros.KqlInterpolate(query, dsInfo, qm.RawQuery)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -88,7 +92,7 @@ func (e *InsightsAnalyticsDatasource) buildQueries(queries []backend.DataQuery,
return iaQueries, nil return iaQueries, nil
} }
func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *InsightsAnalyticsQuery, dsInfo datasourceInfo, client *http.Client, func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *InsightsAnalyticsQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) backend.DataResponse { url string, tracer tracing.Tracer) backend.DataResponse {
dataResponse := backend.DataResponse{} dataResponse := backend.DataResponse{}
@ -136,7 +140,7 @@ func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *I
azlog.Debug("Request failed", "status", res.Status, "body", string(body)) azlog.Debug("Request failed", "status", res.Status, "body", string(body))
return dataResponseError(fmt.Errorf("request failed, status: %s, body: %s", res.Status, body)) return dataResponseError(fmt.Errorf("request failed, status: %s, body: %s", res.Status, body))
} }
var logResponse AzureLogAnalyticsResponse var logResponse loganalytics.AzureLogAnalyticsResponse
d := json.NewDecoder(bytes.NewReader(body)) d := json.NewDecoder(bytes.NewReader(body))
d.UseNumber() d.UseNumber()
err = d.Decode(&logResponse) err = d.Decode(&logResponse)
@ -149,12 +153,12 @@ func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *I
return dataResponseError(err) return dataResponseError(err)
} }
frame, err := ResponseTableToFrame(t) frame, err := loganalytics.ResponseTableToFrame(t)
if err != nil { if err != nil {
return dataResponseError(err) return dataResponseError(err)
} }
if query.ResultFormat == timeSeries { if query.ResultFormat == types.TimeSeries {
tsSchema := frame.TimeSeriesSchema() tsSchema := frame.TimeSeriesSchema()
if tsSchema.Type == data.TimeSeriesTypeLong { if tsSchema.Type == data.TimeSeriesTypeLong {
wideFrame, err := data.LongToWide(frame, nil) wideFrame, err := data.LongToWide(frame, nil)
@ -173,7 +177,7 @@ func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *I
return dataResponse return dataResponse
} }
func (e *InsightsAnalyticsDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo, url string) (*http.Request, error) { func (e *InsightsAnalyticsDatasource) createRequest(ctx context.Context, dsInfo types.DatasourceInfo, url string) (*http.Request, error) {
appInsightsAppID := dsInfo.Settings.AppInsightsAppId appInsightsAppID := dsInfo.Settings.AppInsightsAppId
req, err := http.NewRequest(http.MethodGet, url, nil) req, err := http.NewRequest(http.MethodGet, url, nil)

View File

@ -1,18 +1,19 @@
package azuremonitor package deprecated
import ( import (
"context" "context"
"net/http" "net/http"
"testing" "testing"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func TestInsightsAnalyticsCreateRequest(t *testing.T) { func TestInsightsAnalyticsCreateRequest(t *testing.T) {
ctx := context.Background() ctx := context.Background()
url := "http://ds" url := "http://ds"
dsInfo := datasourceInfo{ dsInfo := types.DatasourceInfo{
Settings: azureMonitorSettings{AppInsightsAppId: "foo"}, Settings: types.AzureMonitorSettings{AppInsightsAppId: "foo"},
DecryptedSecureJSONData: map[string]string{ DecryptedSecureJSONData: map[string]string{
"appInsightsApiKey": "key", "appInsightsApiKey": "key",
}, },

View File

@ -0,0 +1,23 @@
package deprecated
import (
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
// Azure cloud query types
const (
AppInsights = "Application Insights"
InsightsAnalytics = "Insights Analytics"
)
var AzAppInsights = types.AzRoute{
URL: "https://api.applicationinsights.io",
Scopes: []string{},
Headers: map[string]string{"x-ms-app": "Grafana"},
}
// AzChinaAppInsights is the route for the Application Insights API in the
// Azure China cloud. Like AzAppInsights, Scopes is intentionally empty:
// authentication is done with an API key rather than OAuth token scopes.
var AzChinaAppInsights = types.AzRoute{
URL: "https://api.applicationinsights.azure.cn",
Scopes: []string{},
Headers: map[string]string{"x-ms-app": "Grafana"},
}

View File

@ -0,0 +1,72 @@
package deprecated
import (
"encoding/json"
"fmt"
"strings"
)
// insightsJSONQuery is the frontend JSON query model for an Azure Application Insights query.
type insightsJSONQuery struct {
AppInsights struct {
Aggregation string `json:"aggregation"` // aggregation applied to the metric (e.g. avg/sum — TODO confirm allowed values)
Alias string `json:"alias"` // display alias for the resulting series
AllowedTimeGrainsMs []int64 `json:"allowedTimeGrainsMs"` // time grains (ms) the metric supports
Dimensions InsightsDimensions `json:"dimension"` // NOTE: JSON key is singular "dimension"; value may be a string or array (see InsightsDimensions)
DimensionFilter string `json:"dimensionFilter"` // filter expression applied to the dimension values
MetricName string `json:"metricName"` // name of the App Insights metric to query
TimeGrain string `json:"timeGrain"` // requested time grain (ISO 8601 duration — TODO confirm format)
} `json:"appInsights"`
Raw *bool `json:"raw"` // pointer so "unset" is distinguishable from explicit false
}
// InsightsDimensions will unmarshal from a JSON string, or an array of strings,
// into a string array. This exists to support an older query format which is updated
// when a user saves the query or it is sent from the front end, but may not be when
// alerting fetches the model.
type InsightsDimensions []string

// UnmarshalJSON fulfills the json.Unmarshaler interface type.
// Accepted payloads: JSON null, a quoted "none" in any letter case, a plain
// string, or an array of strings. "none"/"None" entries inside an array are
// treated as placeholders and dropped.
func (s *InsightsDimensions) UnmarshalJSON(data []byte) error {
	// Start from an empty (non-nil) slice so every early return yields "no dimensions".
	*s = InsightsDimensions{}

	raw := string(data)
	switch {
	case raw == "null", raw == "":
		return nil
	case strings.ToLower(raw) == `"none"`:
		// A quoted "none" (case-insensitive) explicitly means no dimensions.
		return nil
	}

	if data[0] == '[' {
		// Array form: decode, then filter out the "none" placeholder entries.
		var entries []string
		if err := json.Unmarshal(data, &entries); err != nil {
			return err
		}
		kept := []string{}
		for _, entry := range entries {
			if entry == "none" || entry == "None" {
				continue
			}
			kept = append(kept, entry)
		}
		*s = InsightsDimensions(kept)
		return nil
	}

	// Single-string form: any decode failure means the payload was neither a
	// string nor an array.
	var single string
	if err := json.Unmarshal(data, &single); err != nil {
		return fmt.Errorf("could not parse %q as string or array: %w", raw, err)
	}
	if single != "" {
		*s = InsightsDimensions{single}
	}
	return nil
}
// insightsAnalyticsJSONQuery is the frontend JSON query model for a deprecated
// Insights Analytics query (a free-form query against Application Insights).
type insightsAnalyticsJSONQuery struct {
InsightsAnalytics struct {
Query string `json:"query"` // raw query text as written by the user
ResultFormat string `json:"resultFormat"` // requested frame shape (e.g. time series vs table — TODO confirm values against frontend)
} `json:"insightsAnalytics"`
}

View File

@ -6,9 +6,11 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient" "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/aztokenprovider" "github.com/grafana/grafana/pkg/tsdb/azuremonitor/aztokenprovider"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
) )
func getMiddlewares(route azRoute, model datasourceInfo, cfg *setting.Cfg) ([]httpclient.Middleware, error) { func getMiddlewares(route types.AzRoute, model types.DatasourceInfo, cfg *setting.Cfg) ([]httpclient.Middleware, error) {
middlewares := []httpclient.Middleware{} middlewares := []httpclient.Middleware{}
if len(route.Scopes) > 0 { if len(route.Scopes) > 0 {
@ -19,21 +21,15 @@ func getMiddlewares(route azRoute, model datasourceInfo, cfg *setting.Cfg) ([]ht
middlewares = append(middlewares, aztokenprovider.AuthMiddleware(tokenProvider, route.Scopes)) middlewares = append(middlewares, aztokenprovider.AuthMiddleware(tokenProvider, route.Scopes))
} }
if _, ok := model.DecryptedSecureJSONData["appInsightsApiKey"]; ok && (route.URL == azAppInsights.URL || route.URL == azChinaAppInsights.URL) { // Remove with Grafana 9
// Inject API-Key for AppInsights if apiKeyMiddleware := deprecated.GetAppInsightsMiddleware(route.URL, model.DecryptedSecureJSONData["appInsightsApiKey"]); apiKeyMiddleware != nil {
apiKeyMiddleware := httpclient.MiddlewareFunc(func(opts httpclient.Options, next http.RoundTripper) http.RoundTripper {
return httpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
req.Header.Set("X-API-Key", model.DecryptedSecureJSONData["appInsightsApiKey"])
return next.RoundTrip(req)
})
})
middlewares = append(middlewares, apiKeyMiddleware) middlewares = append(middlewares, apiKeyMiddleware)
} }
return middlewares, nil return middlewares, nil
} }
func newHTTPClient(route azRoute, model datasourceInfo, cfg *setting.Cfg, clientProvider httpclient.Provider) (*http.Client, error) { func newHTTPClient(route types.AzRoute, model types.DatasourceInfo, cfg *setting.Cfg, clientProvider httpclient.Provider) (*http.Client, error) {
m, err := getMiddlewares(route, model, cfg) m, err := getMiddlewares(route, model, cfg)
if err != nil { if err != nil {
return nil, err return nil, err

View File

@ -5,6 +5,8 @@ import (
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azcredentials" "github.com/grafana/grafana/pkg/tsdb/azuremonitor/azcredentials"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -12,18 +14,18 @@ func Test_httpCliProvider(t *testing.T) {
cfg := &setting.Cfg{} cfg := &setting.Cfg{}
tests := []struct { tests := []struct {
name string name string
route azRoute route types.AzRoute
model datasourceInfo model types.DatasourceInfo
expectedMiddlewares int expectedMiddlewares int
Err require.ErrorAssertionFunc Err require.ErrorAssertionFunc
}{ }{
{ {
name: "creates an HTTP client with a middleware due to the scope", name: "creates an HTTP client with a middleware due to the scope",
route: azRoute{ route: types.AzRoute{
URL: "http://route", URL: "http://route",
Scopes: []string{"http://route/.default"}, Scopes: []string{"http://route/.default"},
}, },
model: datasourceInfo{ model: types.DatasourceInfo{
Credentials: &azcredentials.AzureClientSecretCredentials{}, Credentials: &azcredentials.AzureClientSecretCredentials{},
}, },
expectedMiddlewares: 1, expectedMiddlewares: 1,
@ -31,11 +33,11 @@ func Test_httpCliProvider(t *testing.T) {
}, },
{ {
name: "creates an HTTP client with a middleware due to an app key", name: "creates an HTTP client with a middleware due to an app key",
route: azRoute{ route: types.AzRoute{
URL: azAppInsights.URL, URL: deprecated.AzAppInsights.URL,
Scopes: []string{}, Scopes: []string{},
}, },
model: datasourceInfo{ model: types.DatasourceInfo{
Credentials: &azcredentials.AzureClientSecretCredentials{}, Credentials: &azcredentials.AzureClientSecretCredentials{},
DecryptedSecureJSONData: map[string]string{ DecryptedSecureJSONData: map[string]string{
"appInsightsApiKey": "foo", "appInsightsApiKey": "foo",
@ -46,11 +48,11 @@ func Test_httpCliProvider(t *testing.T) {
}, },
{ {
name: "creates an HTTP client without a middleware", name: "creates an HTTP client without a middleware",
route: azRoute{ route: types.AzRoute{
URL: "http://route", URL: "http://route",
Scopes: []string{}, Scopes: []string{},
}, },
model: datasourceInfo{ model: types.DatasourceInfo{
Credentials: &azcredentials.AzureClientSecretCredentials{}, Credentials: &azcredentials.AzureClientSecretCredentials{},
}, },
expectedMiddlewares: 0, expectedMiddlewares: 0,

View File

@ -1,4 +1,4 @@
package azuremonitor package loganalytics
import ( import (
"bytes" "bytes"
@ -17,6 +17,9 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/tracing" "github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/macros"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/grafana/grafana/pkg/util/errutil" "github.com/grafana/grafana/pkg/util/errutil"
"go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/attribute"
"golang.org/x/net/context/ctxhttp" "golang.org/x/net/context/ctxhttp"
@ -24,7 +27,7 @@ import (
// AzureLogAnalyticsDatasource calls the Azure Log Analytics API's // AzureLogAnalyticsDatasource calls the Azure Log Analytics API's
type AzureLogAnalyticsDatasource struct { type AzureLogAnalyticsDatasource struct {
proxy serviceProxy Proxy types.ServiceProxy
} }
// AzureLogAnalyticsQuery is the query request that is built from the saved values for // AzureLogAnalyticsQuery is the query request that is built from the saved values for
@ -39,15 +42,15 @@ type AzureLogAnalyticsQuery struct {
TimeRange backend.TimeRange TimeRange backend.TimeRange
} }
func (e *AzureLogAnalyticsDatasource) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) { func (e *AzureLogAnalyticsDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.proxy.Do(rw, req, cli) e.Proxy.Do(rw, req, cli)
} }
// executeTimeSeriesQuery does the following: // executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query // 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API // 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames // 3. parses the responses for each query into data frames
func (e *AzureLogAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client, func (e *AzureLogAnalyticsDatasource) ExecuteTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) { url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse() result := backend.NewQueryDataResponse()
@ -63,7 +66,7 @@ func (e *AzureLogAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context
return result, nil return result, nil
} }
func getApiURL(queryJSONModel logJSONQuery) string { func getApiURL(queryJSONModel types.LogJSONQuery) string {
// Legacy queries only specify a Workspace GUID, which we need to use the old workspace-centric // Legacy queries only specify a Workspace GUID, which we need to use the old workspace-centric
// API URL for, and newer queries specifying a resource URI should use resource-centric API. // API URL for, and newer queries specifying a resource URI should use resource-centric API.
// However, legacy workspace queries using a `workspaces()` template variable will be resolved // However, legacy workspace queries using a `workspaces()` template variable will be resolved
@ -86,11 +89,11 @@ func getApiURL(queryJSONModel logJSONQuery) string {
} }
} }
func (e *AzureLogAnalyticsDatasource) buildQueries(queries []backend.DataQuery, dsInfo datasourceInfo) ([]*AzureLogAnalyticsQuery, error) { func (e *AzureLogAnalyticsDatasource) buildQueries(queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*AzureLogAnalyticsQuery, error) {
azureLogAnalyticsQueries := []*AzureLogAnalyticsQuery{} azureLogAnalyticsQueries := []*AzureLogAnalyticsQuery{}
for _, query := range queries { for _, query := range queries {
queryJSONModel := logJSONQuery{} queryJSONModel := types.LogJSONQuery{}
err := json.Unmarshal(query.JSON, &queryJSONModel) err := json.Unmarshal(query.JSON, &queryJSONModel)
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Log Analytics query object from JSON: %w", err) return nil, fmt.Errorf("failed to decode the Azure Log Analytics query object from JSON: %w", err)
@ -101,13 +104,13 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(queries []backend.DataQuery,
resultFormat := azureLogAnalyticsTarget.ResultFormat resultFormat := azureLogAnalyticsTarget.ResultFormat
if resultFormat == "" { if resultFormat == "" {
resultFormat = timeSeries resultFormat = types.TimeSeries
} }
apiURL := getApiURL(queryJSONModel) apiURL := getApiURL(queryJSONModel)
params := url.Values{} params := url.Values{}
rawQuery, err := KqlInterpolate(query, dsInfo, azureLogAnalyticsTarget.Query, "TimeGenerated") rawQuery, err := macros.KqlInterpolate(query, dsInfo, azureLogAnalyticsTarget.Query, "TimeGenerated")
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -127,7 +130,7 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(queries []backend.DataQuery,
return azureLogAnalyticsQueries, nil return azureLogAnalyticsQueries, nil
} }
func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery, dsInfo datasourceInfo, client *http.Client, func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) backend.DataResponse { url string, tracer tracing.Tracer) backend.DataResponse {
dataResponse := backend.DataResponse{} dataResponse := backend.DataResponse{}
@ -204,7 +207,7 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *A
azlog.Warn("failed to add custom metadata to azure log analytics response", err) azlog.Warn("failed to add custom metadata to azure log analytics response", err)
} }
if query.ResultFormat == timeSeries { if query.ResultFormat == types.TimeSeries {
tsSchema := frame.TimeSeriesSchema() tsSchema := frame.TimeSeriesSchema()
if tsSchema.Type == data.TimeSeriesTypeLong { if tsSchema.Type == data.TimeSeriesTypeLong {
wideFrame, err := data.LongToWide(frame, nil) wideFrame, err := data.LongToWide(frame, nil)
@ -220,7 +223,7 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *A
return dataResponse return dataResponse
} }
func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo, url string) (*http.Request, error) { func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, dsInfo types.DatasourceInfo, url string) (*http.Request, error) {
req, err := http.NewRequest(http.MethodGet, url, nil) req, err := http.NewRequest(http.MethodGet, url, nil)
if err != nil { if err != nil {
azlog.Debug("Failed to create request", "error", err) azlog.Debug("Failed to create request", "error", err)
@ -232,9 +235,14 @@ func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, dsInfo
return req, nil return req, nil
} }
// AzureLogAnalyticsResponse is the json response object from the Azure Log Analytics API.
type AzureLogAnalyticsResponse struct {
Tables []types.AzureResponseTable `json:"tables"`
}
// GetPrimaryResultTable returns the first table in the response named "PrimaryResult", or an // GetPrimaryResultTable returns the first table in the response named "PrimaryResult", or an
// error if there is no table by that name. // error if there is no table by that name.
func (ar *AzureLogAnalyticsResponse) GetPrimaryResultTable() (*AzureResponseTable, error) { func (ar *AzureLogAnalyticsResponse) GetPrimaryResultTable() (*types.AzureResponseTable, error) {
for _, t := range ar.Tables { for _, t := range ar.Tables {
if t.Name == "PrimaryResult" { if t.Name == "PrimaryResult" {
return &t, nil return &t, nil

View File

@ -1,4 +1,4 @@
package azuremonitor package loganalytics
import ( import (
"context" "context"
@ -12,6 +12,7 @@ import (
"github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp"
"github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/infra/tracing" "github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -37,7 +38,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
"query": "query=Perf | where $__timeFilter() | where $__contains(Computer, 'comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, $__interval), Computer", "query": "query=Perf | where $__timeFilter() | where $__contains(Computer, 'comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, $__interval), Computer",
"resultFormat": "%s" "resultFormat": "%s"
} }
}`, timeSeries)), }`, types.TimeSeries)),
RefID: "A", RefID: "A",
TimeRange: timeRange, TimeRange: timeRange,
}, },
@ -45,7 +46,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
azureLogAnalyticsQueries: []*AzureLogAnalyticsQuery{ azureLogAnalyticsQueries: []*AzureLogAnalyticsQuery{
{ {
RefID: "A", RefID: "A",
ResultFormat: timeSeries, ResultFormat: types.TimeSeries,
URL: "v1/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace/query", URL: "v1/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace/query",
JSON: []byte(fmt.Sprintf(`{ JSON: []byte(fmt.Sprintf(`{
"queryType": "Azure Log Analytics", "queryType": "Azure Log Analytics",
@ -54,7 +55,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
"query": "query=Perf | where $__timeFilter() | where $__contains(Computer, 'comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, $__interval), Computer", "query": "query=Perf | where $__timeFilter() | where $__contains(Computer, 'comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, $__interval), Computer",
"resultFormat": "%s" "resultFormat": "%s"
} }
}`, timeSeries)), }`, types.TimeSeries)),
Params: url.Values{"query": {"query=Perf | where ['TimeGenerated'] >= datetime('2018-03-15T13:00:00Z') and ['TimeGenerated'] <= datetime('2018-03-15T13:34:00Z') | where ['Computer'] in ('comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, 34000ms), Computer"}}, Params: url.Values{"query": {"query=Perf | where ['TimeGenerated'] >= datetime('2018-03-15T13:00:00Z') and ['TimeGenerated'] <= datetime('2018-03-15T13:34:00Z') | where ['Computer'] in ('comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, 34000ms), Computer"}},
Target: "query=query%3DPerf+%7C+where+%5B%27TimeGenerated%27%5D+%3E%3D+datetime%28%272018-03-15T13%3A00%3A00Z%27%29+and+%5B%27TimeGenerated%27%5D+%3C%3D+datetime%28%272018-03-15T13%3A34%3A00Z%27%29+%7C+where+%5B%27Computer%27%5D+in+%28%27comp1%27%2C%27comp2%27%29+%7C+summarize+avg%28CounterValue%29+by+bin%28TimeGenerated%2C+34000ms%29%2C+Computer", Target: "query=query%3DPerf+%7C+where+%5B%27TimeGenerated%27%5D+%3E%3D+datetime%28%272018-03-15T13%3A00%3A00Z%27%29+and+%5B%27TimeGenerated%27%5D+%3C%3D+datetime%28%272018-03-15T13%3A34%3A00Z%27%29+%7C+where+%5B%27Computer%27%5D+in+%28%27comp1%27%2C%27comp2%27%29+%7C+summarize+avg%28CounterValue%29+by+bin%28TimeGenerated%2C+34000ms%29%2C+Computer",
TimeRange: timeRange, TimeRange: timeRange,
@ -74,14 +75,14 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
"query": "query=Perf", "query": "query=Perf",
"resultFormat": "%s" "resultFormat": "%s"
} }
}`, timeSeries)), }`, types.TimeSeries)),
RefID: "A", RefID: "A",
}, },
}, },
azureLogAnalyticsQueries: []*AzureLogAnalyticsQuery{ azureLogAnalyticsQueries: []*AzureLogAnalyticsQuery{
{ {
RefID: "A", RefID: "A",
ResultFormat: timeSeries, ResultFormat: types.TimeSeries,
URL: "v1/workspaces/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/query", URL: "v1/workspaces/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/query",
JSON: []byte(fmt.Sprintf(`{ JSON: []byte(fmt.Sprintf(`{
"queryType": "Azure Log Analytics", "queryType": "Azure Log Analytics",
@ -90,7 +91,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
"query": "query=Perf", "query": "query=Perf",
"resultFormat": "%s" "resultFormat": "%s"
} }
}`, timeSeries)), }`, types.TimeSeries)),
Params: url.Values{"query": {"query=Perf"}}, Params: url.Values{"query": {"query=Perf"}},
Target: "query=query%3DPerf", Target: "query=query%3DPerf",
}, },
@ -109,14 +110,14 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
"query": "query=Perf", "query": "query=Perf",
"resultFormat": "%s" "resultFormat": "%s"
} }
}`, timeSeries)), }`, types.TimeSeries)),
RefID: "A", RefID: "A",
}, },
}, },
azureLogAnalyticsQueries: []*AzureLogAnalyticsQuery{ azureLogAnalyticsQueries: []*AzureLogAnalyticsQuery{
{ {
RefID: "A", RefID: "A",
ResultFormat: timeSeries, ResultFormat: types.TimeSeries,
URL: "v1/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace/query", URL: "v1/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace/query",
JSON: []byte(fmt.Sprintf(`{ JSON: []byte(fmt.Sprintf(`{
"queryType": "Azure Log Analytics", "queryType": "Azure Log Analytics",
@ -125,7 +126,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
"query": "query=Perf", "query": "query=Perf",
"resultFormat": "%s" "resultFormat": "%s"
} }
}`, timeSeries)), }`, types.TimeSeries)),
Params: url.Values{"query": {"query=Perf"}}, Params: url.Values{"query": {"query=Perf"}},
Target: "query=query%3DPerf", Target: "query=query%3DPerf",
}, },
@ -144,14 +145,14 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
"query": "query=Perf", "query": "query=Perf",
"resultFormat": "%s" "resultFormat": "%s"
} }
}`, timeSeries)), }`, types.TimeSeries)),
RefID: "A", RefID: "A",
}, },
}, },
azureLogAnalyticsQueries: []*AzureLogAnalyticsQuery{ azureLogAnalyticsQueries: []*AzureLogAnalyticsQuery{
{ {
RefID: "A", RefID: "A",
ResultFormat: timeSeries, ResultFormat: types.TimeSeries,
URL: "v1/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace/query", URL: "v1/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace/query",
JSON: []byte(fmt.Sprintf(`{ JSON: []byte(fmt.Sprintf(`{
"queryType": "Azure Log Analytics", "queryType": "Azure Log Analytics",
@ -160,7 +161,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
"query": "query=Perf", "query": "query=Perf",
"resultFormat": "%s" "resultFormat": "%s"
} }
}`, timeSeries)), }`, types.TimeSeries)),
Params: url.Values{"query": {"query=Perf"}}, Params: url.Values{"query": {"query=Perf"}},
Target: "query=query%3DPerf", Target: "query=query%3DPerf",
}, },
@ -171,7 +172,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
for _, tt := range tests { for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) { t.Run(tt.name, func(t *testing.T) {
queries, err := datasource.buildQueries(tt.queryModel, datasourceInfo{}) queries, err := datasource.buildQueries(tt.queryModel, types.DatasourceInfo{})
tt.Err(t, err) tt.Err(t, err)
if diff := cmp.Diff(tt.azureLogAnalyticsQueries[0], queries[0]); diff != "" { if diff := cmp.Diff(tt.azureLogAnalyticsQueries[0], queries[0]); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff) t.Errorf("Result mismatch (-want +got):\n%s", diff)
@ -183,7 +184,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
func TestLogAnalyticsCreateRequest(t *testing.T) { func TestLogAnalyticsCreateRequest(t *testing.T) {
ctx := context.Background() ctx := context.Background()
url := "http://ds" url := "http://ds"
dsInfo := datasourceInfo{} dsInfo := types.DatasourceInfo{}
tests := []struct { tests := []struct {
name string name string
@ -216,9 +217,9 @@ func TestLogAnalyticsCreateRequest(t *testing.T) {
func Test_executeQueryErrorWithDifferentLogAnalyticsCreds(t *testing.T) { func Test_executeQueryErrorWithDifferentLogAnalyticsCreds(t *testing.T) {
ds := AzureLogAnalyticsDatasource{} ds := AzureLogAnalyticsDatasource{}
dsInfo := datasourceInfo{ dsInfo := types.DatasourceInfo{
Services: map[string]datasourceService{ Services: map[string]types.DatasourceService{
azureLogAnalytics: {URL: "http://ds"}, "Azure Log Analytics": {URL: "http://ds"},
}, },
JSONData: map[string]interface{}{ JSONData: map[string]interface{}{
"azureLogAnalyticsSameAs": false, "azureLogAnalyticsSameAs": false,
@ -231,7 +232,7 @@ func Test_executeQueryErrorWithDifferentLogAnalyticsCreds(t *testing.T) {
} }
tracer, err := tracing.InitializeTracerForTest() tracer, err := tracing.InitializeTracerForTest()
require.NoError(t, err) require.NoError(t, err)
res := ds.executeQuery(ctx, query, dsInfo, &http.Client{}, dsInfo.Services[azureLogAnalytics].URL, tracer) res := ds.executeQuery(ctx, query, dsInfo, &http.Client{}, dsInfo.Services["Azure Log Analytics"].URL, tracer)
if res.Error == nil { if res.Error == nil {
t.Fatal("expecting an error") t.Fatal("expecting an error")
} }

View File

@ -1,4 +1,4 @@
package azuremonitor package loganalytics
import ( import (
"encoding/json" "encoding/json"
@ -8,10 +8,11 @@ import (
"time" "time"
"github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
) )
// ResponseTableToFrame converts an AzureResponseTable to a data.Frame. // ResponseTableToFrame converts an AzureResponseTable to a data.Frame.
func ResponseTableToFrame(table *AzureResponseTable) (*data.Frame, error) { func ResponseTableToFrame(table *types.AzureResponseTable) (*data.Frame, error) {
converterFrame, err := converterFrameForTable(table) converterFrame, err := converterFrameForTable(table)
if err != nil { if err != nil {
return nil, err return nil, err
@ -27,7 +28,7 @@ func ResponseTableToFrame(table *AzureResponseTable) (*data.Frame, error) {
return converterFrame.Frame, nil return converterFrame.Frame, nil
} }
func converterFrameForTable(t *AzureResponseTable) (*data.FrameInputConverter, error) { func converterFrameForTable(t *types.AzureResponseTable) (*data.FrameInputConverter, error) {
converters := []data.FieldConverter{} converters := []data.FieldConverter{}
colNames := make([]string, len(t.Columns)) colNames := make([]string, len(t.Columns))
colTypes := make([]string, len(t.Columns)) // for metadata colTypes := make([]string, len(t.Columns)) // for metadata

View File

@ -1,4 +1,4 @@
package azuremonitor package loganalytics
import ( import (
"encoding/json" "encoding/json"
@ -156,7 +156,7 @@ func TestLogTableToFrame(t *testing.T) {
func loadLogAnalyticsTestFileWithNumber(t *testing.T, name string) AzureLogAnalyticsResponse { func loadLogAnalyticsTestFileWithNumber(t *testing.T, name string) AzureLogAnalyticsResponse {
t.Helper() t.Helper()
path := filepath.Join("testdata", name) path := filepath.Join("../testdata", name)
// Ignore gosec warning G304 since it's a test // Ignore gosec warning G304 since it's a test
// nolint:gosec // nolint:gosec
f, err := os.Open(path) f, err := os.Open(path)

View File

@ -1,4 +1,4 @@
package azuremonitor package macros
import ( import (
"fmt" "fmt"
@ -9,6 +9,8 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/grafana/grafana/pkg/tsdb/legacydata/interval" "github.com/grafana/grafana/pkg/tsdb/legacydata/interval"
) )
@ -31,7 +33,7 @@ type kqlMacroEngine struct {
// - $__escapeMulti('\\vm\eth0\Total','\\vm\eth2\Total') -> @'\\vm\eth0\Total',@'\\vm\eth2\Total' // - $__escapeMulti('\\vm\eth0\Total','\\vm\eth2\Total') -> @'\\vm\eth0\Total',@'\\vm\eth2\Total'
// KqlInterpolate interpolates macros for Kusto Query Language (KQL) queries // KqlInterpolate interpolates macros for Kusto Query Language (KQL) queries
func KqlInterpolate(query backend.DataQuery, dsInfo datasourceInfo, kql string, defaultTimeField ...string) (string, error) { func KqlInterpolate(query backend.DataQuery, dsInfo types.DatasourceInfo, kql string, defaultTimeField ...string) (string, error) {
engine := kqlMacroEngine{} engine := kqlMacroEngine{}
defaultTimeFieldForAllDatasources := "timestamp" defaultTimeFieldForAllDatasources := "timestamp"
@ -41,7 +43,7 @@ func KqlInterpolate(query backend.DataQuery, dsInfo datasourceInfo, kql string,
return engine.Interpolate(query, dsInfo, kql, defaultTimeFieldForAllDatasources) return engine.Interpolate(query, dsInfo, kql, defaultTimeFieldForAllDatasources)
} }
func (m *kqlMacroEngine) Interpolate(query backend.DataQuery, dsInfo datasourceInfo, kql string, defaultTimeField string) (string, error) { func (m *kqlMacroEngine) Interpolate(query backend.DataQuery, dsInfo types.DatasourceInfo, kql string, defaultTimeField string) (string, error) {
m.timeRange = query.TimeRange m.timeRange = query.TimeRange
m.query = query m.query = query
rExp, _ := regexp.Compile(sExpr) rExp, _ := regexp.Compile(sExpr)
@ -86,7 +88,7 @@ func (m *kqlMacroEngine) Interpolate(query backend.DataQuery, dsInfo datasourceI
return kql, nil return kql, nil
} }
func (m *kqlMacroEngine) evaluateMacro(name string, defaultTimeField string, args []string, dsInfo datasourceInfo) (string, error) { func (m *kqlMacroEngine) evaluateMacro(name string, defaultTimeField string, args []string, dsInfo types.DatasourceInfo) (string, error) {
switch name { switch name {
case "timeFilter": case "timeFilter":
timeColumn := defaultTimeField timeColumn := defaultTimeField

View File

@ -1,4 +1,4 @@
package azuremonitor package macros
import ( import (
"testing" "testing"
@ -7,6 +7,7 @@ import (
"github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts" "github.com/google/go-cmp/cmp/cmpopts"
"github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -125,7 +126,7 @@ func TestAzureLogAnalyticsMacros(t *testing.T) {
for _, tt := range tests { for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) { t.Run(tt.name, func(t *testing.T) {
defaultTimeField := "TimeGenerated" defaultTimeField := "TimeGenerated"
rawQuery, err := KqlInterpolate(tt.query, datasourceInfo{}, tt.kql, defaultTimeField) rawQuery, err := KqlInterpolate(tt.query, types.DatasourceInfo{}, tt.kql, defaultTimeField)
tt.Err(t, err) tt.Err(t, err)
if diff := cmp.Diff(tt.expected, rawQuery, cmpopts.EquateNaNs()); diff != "" { if diff := cmp.Diff(tt.expected, rawQuery, cmpopts.EquateNaNs()); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff) t.Errorf("Result mismatch (-want +got):\n%s", diff)

View File

@ -1,4 +1,4 @@
package azuremonitor package metrics
import ( import (
"context" "context"
@ -16,6 +16,10 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/infra/tracing" "github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/resourcegraph"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/grafana/grafana/pkg/util/errutil" "github.com/grafana/grafana/pkg/util/errutil"
"go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/attribute"
"golang.org/x/net/context/ctxhttp" "golang.org/x/net/context/ctxhttp"
@ -23,28 +27,25 @@ import (
// AzureMonitorDatasource calls the Azure Monitor API - one of the four API's supported // AzureMonitorDatasource calls the Azure Monitor API - one of the four API's supported
type AzureMonitorDatasource struct { type AzureMonitorDatasource struct {
proxy serviceProxy Proxy types.ServiceProxy
} }
var ( var (
// 1m, 5m, 15m, 30m, 1h, 6h, 12h, 1d in milliseconds
defaultAllowedIntervalsMS = []int64{60000, 300000, 900000, 1800000, 3600000, 21600000, 43200000, 86400000}
// Used to convert the aggregation value to the Azure enum for deep linking // Used to convert the aggregation value to the Azure enum for deep linking
aggregationTypeMap = map[string]int{"None": 0, "Total": 1, "Minimum": 2, "Maximum": 3, "Average": 4, "Count": 7} aggregationTypeMap = map[string]int{"None": 0, "Total": 1, "Minimum": 2, "Maximum": 3, "Average": 4, "Count": 7}
) )
const azureMonitorAPIVersion = "2018-01-01" const azureMonitorAPIVersion = "2018-01-01"
func (e *AzureMonitorDatasource) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) { func (e *AzureMonitorDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.proxy.Do(rw, req, cli) e.Proxy.Do(rw, req, cli)
} }
// executeTimeSeriesQuery does the following: // executeTimeSeriesQuery does the following:
// 1. build the AzureMonitor url and querystring for each query // 1. build the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API // 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames // 3. parses the responses for each query into data frames
func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client, func (e *AzureMonitorDatasource) ExecuteTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) { url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := backend.NewQueryDataResponse() result := backend.NewQueryDataResponse()
@ -60,12 +61,12 @@ func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, ori
return result, nil return result, nil
} }
func (e *AzureMonitorDatasource) buildQueries(queries []backend.DataQuery, dsInfo datasourceInfo) ([]*AzureMonitorQuery, error) { func (e *AzureMonitorDatasource) buildQueries(queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*types.AzureMonitorQuery, error) {
azureMonitorQueries := []*AzureMonitorQuery{} azureMonitorQueries := []*types.AzureMonitorQuery{}
for _, query := range queries { for _, query := range queries {
var target string var target string
queryJSONModel := azureMonitorJSONQuery{} queryJSONModel := types.AzureMonitorJSONQuery{}
err := json.Unmarshal(query.JSON, &queryJSONModel) err := json.Unmarshal(query.JSON, &queryJSONModel)
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to decode the Azure Monitor query object from JSON: %w", err) return nil, fmt.Errorf("failed to decode the Azure Monitor query object from JSON: %w", err)
@ -93,7 +94,7 @@ func (e *AzureMonitorDatasource) buildQueries(queries []backend.DataQuery, dsInf
timeGrain := azJSONModel.TimeGrain timeGrain := azJSONModel.TimeGrain
timeGrains := azJSONModel.AllowedTimeGrainsMs timeGrains := azJSONModel.AllowedTimeGrainsMs
if timeGrain == "auto" { if timeGrain == "auto" {
timeGrain, err = setAutoTimeGrain(query.Interval.Milliseconds(), timeGrains) timeGrain, err = azTime.SetAutoTimeGrain(query.Interval.Milliseconds(), timeGrains)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -135,7 +136,7 @@ func (e *AzureMonitorDatasource) buildQueries(queries []backend.DataQuery, dsInf
azlog.Debug("Azuremonitor request", "params", params) azlog.Debug("Azuremonitor request", "params", params)
} }
azureMonitorQueries = append(azureMonitorQueries, &AzureMonitorQuery{ azureMonitorQueries = append(azureMonitorQueries, &types.AzureMonitorQuery{
URL: azureURL, URL: azureURL,
UrlComponents: urlComponents, UrlComponents: urlComponents,
Target: target, Target: target,
@ -149,7 +150,7 @@ func (e *AzureMonitorDatasource) buildQueries(queries []backend.DataQuery, dsInf
return azureMonitorQueries, nil return azureMonitorQueries, nil
} }
func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureMonitorQuery, dsInfo datasourceInfo, cli *http.Client, func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *types.AzureMonitorQuery, dsInfo types.DatasourceInfo, cli *http.Client,
url string, tracer tracing.Tracer) backend.DataResponse { url string, tracer tracing.Tracer) backend.DataResponse {
dataResponse := backend.DataResponse{} dataResponse := backend.DataResponse{}
@ -191,7 +192,7 @@ func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureM
return dataResponse return dataResponse
} }
azurePortalUrl, err := getAzurePortalUrl(dsInfo.Cloud) azurePortalUrl, err := resourcegraph.GetAzurePortalUrl(dsInfo.Cloud)
if err != nil { if err != nil {
dataResponse.Error = err dataResponse.Error = err
return dataResponse return dataResponse
@ -206,7 +207,7 @@ func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureM
return dataResponse return dataResponse
} }
func (e *AzureMonitorDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo, url string) (*http.Request, error) { func (e *AzureMonitorDatasource) createRequest(ctx context.Context, dsInfo types.DatasourceInfo, url string) (*http.Request, error) {
req, err := http.NewRequest(http.MethodGet, url, nil) req, err := http.NewRequest(http.MethodGet, url, nil)
if err != nil { if err != nil {
azlog.Debug("Failed to create request", "error", err) azlog.Debug("Failed to create request", "error", err)
@ -218,28 +219,28 @@ func (e *AzureMonitorDatasource) createRequest(ctx context.Context, dsInfo datas
return req, nil return req, nil
} }
func (e *AzureMonitorDatasource) unmarshalResponse(res *http.Response) (AzureMonitorResponse, error) { func (e *AzureMonitorDatasource) unmarshalResponse(res *http.Response) (types.AzureMonitorResponse, error) {
body, err := ioutil.ReadAll(res.Body) body, err := ioutil.ReadAll(res.Body)
if err != nil { if err != nil {
return AzureMonitorResponse{}, err return types.AzureMonitorResponse{}, err
} }
if res.StatusCode/100 != 2 { if res.StatusCode/100 != 2 {
azlog.Debug("Request failed", "status", res.Status, "body", string(body)) azlog.Debug("Request failed", "status", res.Status, "body", string(body))
return AzureMonitorResponse{}, fmt.Errorf("request failed, status: %s", res.Status) return types.AzureMonitorResponse{}, fmt.Errorf("request failed, status: %s", res.Status)
} }
var data AzureMonitorResponse var data types.AzureMonitorResponse
err = json.Unmarshal(body, &data) err = json.Unmarshal(body, &data)
if err != nil { if err != nil {
azlog.Debug("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body)) azlog.Debug("Failed to unmarshal AzureMonitor response", "error", err, "status", res.Status, "body", string(body))
return AzureMonitorResponse{}, err return types.AzureMonitorResponse{}, err
} }
return data, nil return data, nil
} }
func (e *AzureMonitorDatasource) parseResponse(amr AzureMonitorResponse, query *AzureMonitorQuery, azurePortalUrl string) (data.Frames, error) { func (e *AzureMonitorDatasource) parseResponse(amr types.AzureMonitorResponse, query *types.AzureMonitorQuery, azurePortalUrl string) (data.Frames, error) {
if len(amr.Value) == 0 { if len(amr.Value) == 0 {
return nil, nil return nil, nil
} }
@ -303,7 +304,7 @@ func (e *AzureMonitorDatasource) parseResponse(amr AzureMonitorResponse, query *
frame.SetRow(i, point.TimeStamp, value) frame.SetRow(i, point.TimeStamp, value)
} }
frameWithLink := addConfigLinks(*frame, queryUrl) frameWithLink := resourcegraph.AddConfigLinks(*frame, queryUrl)
frames = append(frames, &frameWithLink) frames = append(frames, &frameWithLink)
} }
@ -311,7 +312,7 @@ func (e *AzureMonitorDatasource) parseResponse(amr AzureMonitorResponse, query *
} }
// Gets the deep link for the given query // Gets the deep link for the given query
func getQueryUrl(query *AzureMonitorQuery, azurePortalUrl string) (string, error) { func getQueryUrl(query *types.AzureMonitorQuery, azurePortalUrl string) (string, error) {
aggregationType := aggregationTypeMap["Average"] aggregationType := aggregationTypeMap["Average"]
aggregation := query.Params.Get("aggregation") aggregation := query.Params.Get("aggregation")
if aggregation != "" { if aggregation != "" {
@ -343,7 +344,7 @@ func getQueryUrl(query *AzureMonitorQuery, azurePortalUrl string) (string, error
chartDef, err := json.Marshal(map[string]interface{}{ chartDef, err := json.Marshal(map[string]interface{}{
"v2charts": []interface{}{ "v2charts": []interface{}{
map[string]interface{}{ map[string]interface{}{
"metrics": []metricChartDefinition{ "metrics": []types.MetricChartDefinition{
{ {
ResourceMetadata: map[string]string{ ResourceMetadata: map[string]string{
"id": id, "id": id,
@ -351,7 +352,7 @@ func getQueryUrl(query *AzureMonitorQuery, azurePortalUrl string) (string, error
Name: query.Params.Get("metricnames"), Name: query.Params.Get("metricnames"),
AggregationType: aggregationType, AggregationType: aggregationType,
Namespace: query.Params.Get("metricnamespace"), Namespace: query.Params.Get("metricnamespace"),
MetricVisualization: metricVisualization{ MetricVisualization: types.MetricVisualization{
DisplayName: query.Params.Get("metricnames"), DisplayName: query.Params.Get("metricnames"),
ResourceDisplayName: query.UrlComponents["resourceName"], ResourceDisplayName: query.UrlComponents["resourceName"],
}, },
@ -387,7 +388,7 @@ func formatAzureMonitorLegendKey(alias string, resourceName string, metricName s
} }
keys = sort.StringSlice(keys) keys = sort.StringSlice(keys)
result := legendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte { result := types.LegendKeyFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
metaPartName := strings.Replace(string(in), "{{", "", 1) metaPartName := strings.Replace(string(in), "{{", "", 1)
metaPartName = strings.Replace(metaPartName, "}}", "", 1) metaPartName = strings.Replace(metaPartName, "}}", "", 1)
metaPartName = strings.ToLower(strings.TrimSpace(metaPartName)) metaPartName = strings.ToLower(strings.TrimSpace(metaPartName))

View File

@ -1,4 +1,4 @@
package azuremonitor package metrics
import ( import (
"context" "context"
@ -16,14 +16,16 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/components/simplejson"
azTime "github.com/grafana/grafana/pkg/tsdb/azuremonitor/time"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
ptr "github.com/xorcare/pointer" ptr "github.com/xorcare/pointer"
) )
func TestAzureMonitorBuildQueries(t *testing.T) { func TestAzureMonitorBuildQueries(t *testing.T) {
datasource := &AzureMonitorDatasource{} datasource := &AzureMonitorDatasource{}
dsInfo := datasourceInfo{ dsInfo := types.DatasourceInfo{
Settings: azureMonitorSettings{ Settings: types.AzureMonitorSettings{
SubscriptionId: "default-subscription", SubscriptionId: "default-subscription",
}, },
} }
@ -96,7 +98,7 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
name: "has dimensionFilter*s* property with one dimension", name: "has dimensionFilter*s* property with one dimension",
azureMonitorVariedProperties: map[string]interface{}{ azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M", "timeGrain": "PT1M",
"dimensionFilters": []azureMonitorDimensionFilter{{"blob", "eq", "*"}}, "dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "eq", Filter: "*"}},
"top": "30", "top": "30",
}, },
queryInterval: duration, queryInterval: duration,
@ -107,7 +109,7 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
name: "has dimensionFilter*s* property with two dimensions", name: "has dimensionFilter*s* property with two dimensions",
azureMonitorVariedProperties: map[string]interface{}{ azureMonitorVariedProperties: map[string]interface{}{
"timeGrain": "PT1M", "timeGrain": "PT1M",
"dimensionFilters": []azureMonitorDimensionFilter{{"blob", "eq", "*"}, {"tier", "eq", "*"}}, "dimensionFilters": []types.AzureMonitorDimensionFilter{{Dimension: "blob", Operator: "eq", Filter: "*"}, {Dimension: "tier", Operator: "eq", Filter: "*"}},
"top": "30", "top": "30",
}, },
queryInterval: duration, queryInterval: duration,
@ -149,7 +151,7 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
}, },
} }
azureMonitorQuery := &AzureMonitorQuery{ azureMonitorQuery := &types.AzureMonitorQuery{
URL: "12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics", URL: "12345678-aaaa-bbbb-cccc-123456789abc/resourceGroups/grafanastaging/providers/Microsoft.Compute/virtualMachines/grafana/providers/microsoft.insights/metrics",
UrlComponents: map[string]string{ UrlComponents: map[string]string{
"metricDefinition": "Microsoft.Compute/virtualMachines", "metricDefinition": "Microsoft.Compute/virtualMachines",
@ -168,7 +170,7 @@ func TestAzureMonitorBuildQueries(t *testing.T) {
queries, err := datasource.buildQueries(tsdbQuery, dsInfo) queries, err := datasource.buildQueries(tsdbQuery, dsInfo)
require.NoError(t, err) require.NoError(t, err)
if diff := cmp.Diff(azureMonitorQuery, queries[0], cmpopts.IgnoreUnexported(simplejson.Json{}), cmpopts.IgnoreFields(AzureMonitorQuery{}, "Params")); diff != "" { if diff := cmp.Diff(azureMonitorQuery, queries[0], cmpopts.IgnoreUnexported(simplejson.Json{}), cmpopts.IgnoreFields(types.AzureMonitorQuery{}, "Params")); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff) t.Errorf("Result mismatch (-want +got):\n%s", diff)
} }
@ -219,14 +221,14 @@ func TestAzureMonitorParseResponse(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
responseFile string responseFile string
mockQuery *AzureMonitorQuery mockQuery *types.AzureMonitorQuery
expectedFrames data.Frames expectedFrames data.Frames
queryIntervalMS int64 queryIntervalMS int64
}{ }{
{ {
name: "average aggregate time series response", name: "average aggregate time series response",
responseFile: "1-azure-monitor-response-avg.json", responseFile: "1-azure-monitor-response-avg.json",
mockQuery: &AzureMonitorQuery{ mockQuery: &types.AzureMonitorQuery{
UrlComponents: map[string]string{ UrlComponents: map[string]string{
"resourceName": "grafana", "resourceName": "grafana",
}, },
@ -247,7 +249,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
{ {
name: "total aggregate time series response", name: "total aggregate time series response",
responseFile: "2-azure-monitor-response-total.json", responseFile: "2-azure-monitor-response-total.json",
mockQuery: &AzureMonitorQuery{ mockQuery: &types.AzureMonitorQuery{
UrlComponents: map[string]string{ UrlComponents: map[string]string{
"resourceName": "grafana", "resourceName": "grafana",
}, },
@ -268,7 +270,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
{ {
name: "maximum aggregate time series response", name: "maximum aggregate time series response",
responseFile: "3-azure-monitor-response-maximum.json", responseFile: "3-azure-monitor-response-maximum.json",
mockQuery: &AzureMonitorQuery{ mockQuery: &types.AzureMonitorQuery{
UrlComponents: map[string]string{ UrlComponents: map[string]string{
"resourceName": "grafana", "resourceName": "grafana",
}, },
@ -289,7 +291,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
{ {
name: "minimum aggregate time series response", name: "minimum aggregate time series response",
responseFile: "4-azure-monitor-response-minimum.json", responseFile: "4-azure-monitor-response-minimum.json",
mockQuery: &AzureMonitorQuery{ mockQuery: &types.AzureMonitorQuery{
UrlComponents: map[string]string{ UrlComponents: map[string]string{
"resourceName": "grafana", "resourceName": "grafana",
}, },
@ -310,7 +312,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
{ {
name: "count aggregate time series response", name: "count aggregate time series response",
responseFile: "5-azure-monitor-response-count.json", responseFile: "5-azure-monitor-response-count.json",
mockQuery: &AzureMonitorQuery{ mockQuery: &types.AzureMonitorQuery{
UrlComponents: map[string]string{ UrlComponents: map[string]string{
"resourceName": "grafana", "resourceName": "grafana",
}, },
@ -331,7 +333,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
{ {
name: "single dimension time series response", name: "single dimension time series response",
responseFile: "6-azure-monitor-response-single-dimension.json", responseFile: "6-azure-monitor-response-single-dimension.json",
mockQuery: &AzureMonitorQuery{ mockQuery: &types.AzureMonitorQuery{
UrlComponents: map[string]string{ UrlComponents: map[string]string{
"resourceName": "grafana", "resourceName": "grafana",
}, },
@ -365,7 +367,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
{ {
name: "with alias patterns in the query", name: "with alias patterns in the query",
responseFile: "2-azure-monitor-response-total.json", responseFile: "2-azure-monitor-response-total.json",
mockQuery: &AzureMonitorQuery{ mockQuery: &types.AzureMonitorQuery{
Alias: "custom {{resourcegroup}} {{namespace}} {{resourceName}} {{metric}}", Alias: "custom {{resourcegroup}} {{namespace}} {{resourceName}} {{metric}}",
UrlComponents: map[string]string{ UrlComponents: map[string]string{
"resourceName": "grafana", "resourceName": "grafana",
@ -387,7 +389,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
{ {
name: "single dimension with alias", name: "single dimension with alias",
responseFile: "6-azure-monitor-response-single-dimension.json", responseFile: "6-azure-monitor-response-single-dimension.json",
mockQuery: &AzureMonitorQuery{ mockQuery: &types.AzureMonitorQuery{
Alias: "{{dimensionname}}={{DimensionValue}}", Alias: "{{dimensionname}}={{DimensionValue}}",
UrlComponents: map[string]string{ UrlComponents: map[string]string{
"resourceName": "grafana", "resourceName": "grafana",
@ -424,7 +426,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
{ {
name: "multiple dimension time series response with label alias", name: "multiple dimension time series response with label alias",
responseFile: "7-azure-monitor-response-multi-dimension.json", responseFile: "7-azure-monitor-response-multi-dimension.json",
mockQuery: &AzureMonitorQuery{ mockQuery: &types.AzureMonitorQuery{
Alias: "{{resourcegroup}} {Blob Type={{blobtype}}, Tier={{Tier}}}", Alias: "{{resourcegroup}} {Blob Type={{blobtype}}, Tier={{Tier}}}",
UrlComponents: map[string]string{ UrlComponents: map[string]string{
"resourceName": "grafana", "resourceName": "grafana",
@ -462,7 +464,7 @@ func TestAzureMonitorParseResponse(t *testing.T) {
{ {
name: "unspecified unit with alias should not panic", name: "unspecified unit with alias should not panic",
responseFile: "8-azure-monitor-response-unspecified-unit.json", responseFile: "8-azure-monitor-response-unspecified-unit.json",
mockQuery: &AzureMonitorQuery{ mockQuery: &types.AzureMonitorQuery{
Alias: "custom", Alias: "custom",
UrlComponents: map[string]string{ UrlComponents: map[string]string{
"resourceName": "grafana", "resourceName": "grafana",
@ -540,22 +542,22 @@ func TestFindClosestAllowIntervalMS(t *testing.T) {
} }
for _, tt := range tests { for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) { t.Run(tt.name, func(t *testing.T) {
interval := findClosestAllowedIntervalMS(tt.inputInterval, tt.allowedTimeGrains) interval := azTime.FindClosestAllowedIntervalMS(tt.inputInterval, tt.allowedTimeGrains)
require.Equal(t, tt.expectedInterval, interval) require.Equal(t, tt.expectedInterval, interval)
}) })
} }
} }
func loadTestFile(t *testing.T, name string) AzureMonitorResponse { func loadTestFile(t *testing.T, name string) types.AzureMonitorResponse {
t.Helper() t.Helper()
path := filepath.Join("testdata", name) path := filepath.Join("../testdata", name)
// Ignore gosec warning G304 since it's a test // Ignore gosec warning G304 since it's a test
// nolint:gosec // nolint:gosec
jsonBody, err := ioutil.ReadFile(path) jsonBody, err := ioutil.ReadFile(path)
require.NoError(t, err) require.NoError(t, err)
var azData AzureMonitorResponse var azData types.AzureMonitorResponse
err = json.Unmarshal(jsonBody, &azData) err = json.Unmarshal(jsonBody, &azData)
require.NoError(t, err) require.NoError(t, err)
return azData return azData
@ -563,7 +565,7 @@ func loadTestFile(t *testing.T, name string) AzureMonitorResponse {
func TestAzureMonitorCreateRequest(t *testing.T) { func TestAzureMonitorCreateRequest(t *testing.T) {
ctx := context.Background() ctx := context.Background()
dsInfo := datasourceInfo{} dsInfo := types.DatasourceInfo{}
url := "http://ds/" url := "http://ds/"
tests := []struct { tests := []struct {

View File

@ -1,4 +1,4 @@
package azuremonitor package metrics
import ( import (
"fmt" "fmt"

View File

@ -1,4 +1,4 @@
package azuremonitor package metrics
import ( import (
"testing" "testing"

View File

@ -1,4 +1,4 @@
package azuremonitor package resourcegraph
import ( import (
"bytes" "bytes"
@ -17,14 +17,23 @@ import (
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/tracing" "github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azlog"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/loganalytics"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/macros"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/grafana/grafana/pkg/util/errutil" "github.com/grafana/grafana/pkg/util/errutil"
"go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/attribute"
"golang.org/x/net/context/ctxhttp" "golang.org/x/net/context/ctxhttp"
) )
// AzureResourceGraphResponse is the json response object from the Azure Resource Graph Analytics API.
type AzureResourceGraphResponse struct {
Data types.AzureResponseTable `json:"data"`
}
// AzureResourceGraphDatasource calls the Azure Resource Graph API's // AzureResourceGraphDatasource calls the Azure Resource Graph API's
type AzureResourceGraphDatasource struct { type AzureResourceGraphDatasource struct {
proxy serviceProxy Proxy types.ServiceProxy
} }
// AzureResourceGraphQuery is the query request that is built from the saved values for // AzureResourceGraphQuery is the query request that is built from the saved values for
@ -41,15 +50,15 @@ type AzureResourceGraphQuery struct {
const argAPIVersion = "2021-06-01-preview" const argAPIVersion = "2021-06-01-preview"
const argQueryProviderName = "/providers/Microsoft.ResourceGraph/resources" const argQueryProviderName = "/providers/Microsoft.ResourceGraph/resources"
func (e *AzureResourceGraphDatasource) resourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) { func (e *AzureResourceGraphDatasource) ResourceRequest(rw http.ResponseWriter, req *http.Request, cli *http.Client) {
e.proxy.Do(rw, req, cli) e.Proxy.Do(rw, req, cli)
} }
// executeTimeSeriesQuery does the following: // executeTimeSeriesQuery does the following:
// 1. builds the AzureMonitor url and querystring for each query // 1. builds the AzureMonitor url and querystring for each query
// 2. executes each query by calling the Azure Monitor API // 2. executes each query by calling the Azure Monitor API
// 3. parses the responses for each query into data frames // 3. parses the responses for each query into data frames
func (e *AzureResourceGraphDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo datasourceInfo, client *http.Client, func (e *AzureResourceGraphDatasource) ExecuteTimeSeriesQuery(ctx context.Context, originalQueries []backend.DataQuery, dsInfo types.DatasourceInfo, client *http.Client,
url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) { url string, tracer tracing.Tracer) (*backend.QueryDataResponse, error) {
result := &backend.QueryDataResponse{ result := &backend.QueryDataResponse{
Responses: map[string]backend.DataResponse{}, Responses: map[string]backend.DataResponse{},
@ -67,7 +76,14 @@ func (e *AzureResourceGraphDatasource) executeTimeSeriesQuery(ctx context.Contex
return result, nil return result, nil
} }
func (e *AzureResourceGraphDatasource) buildQueries(queries []backend.DataQuery, dsInfo datasourceInfo) ([]*AzureResourceGraphQuery, error) { type argJSONQuery struct {
AzureResourceGraph struct {
Query string `json:"query"`
ResultFormat string `json:"resultFormat"`
} `json:"azureResourceGraph"`
}
func (e *AzureResourceGraphDatasource) buildQueries(queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*AzureResourceGraphQuery, error) {
var azureResourceGraphQueries []*AzureResourceGraphQuery var azureResourceGraphQueries []*AzureResourceGraphQuery
for _, query := range queries { for _, query := range queries {
@ -85,7 +101,7 @@ func (e *AzureResourceGraphDatasource) buildQueries(queries []backend.DataQuery,
resultFormat = "table" resultFormat = "table"
} }
interpolatedQuery, err := KqlInterpolate(query, dsInfo, azureResourceGraphTarget.Query) interpolatedQuery, err := macros.KqlInterpolate(query, dsInfo, azureResourceGraphTarget.Query)
if err != nil { if err != nil {
return nil, err return nil, err
@ -103,7 +119,7 @@ func (e *AzureResourceGraphDatasource) buildQueries(queries []backend.DataQuery,
return azureResourceGraphQueries, nil return azureResourceGraphQueries, nil
} }
func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *AzureResourceGraphQuery, dsInfo datasourceInfo, client *http.Client, func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *AzureResourceGraphQuery, dsInfo types.DatasourceInfo, client *http.Client,
dsURL string, tracer tracing.Tracer) backend.DataResponse { dsURL string, tracer tracing.Tracer) backend.DataResponse {
dataResponse := backend.DataResponse{} dataResponse := backend.DataResponse{}
@ -173,18 +189,18 @@ func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *
return dataResponseErrorWithExecuted(err) return dataResponseErrorWithExecuted(err)
} }
frame, err := ResponseTableToFrame(&argResponse.Data) frame, err := loganalytics.ResponseTableToFrame(&argResponse.Data)
if err != nil { if err != nil {
return dataResponseErrorWithExecuted(err) return dataResponseErrorWithExecuted(err)
} }
azurePortalUrl, err := getAzurePortalUrl(dsInfo.Cloud) azurePortalUrl, err := GetAzurePortalUrl(dsInfo.Cloud)
if err != nil { if err != nil {
return dataResponseErrorWithExecuted(err) return dataResponseErrorWithExecuted(err)
} }
url := azurePortalUrl + "/#blade/HubsExtension/ArgQueryBlade/query/" + url.PathEscape(query.InterpolatedQuery) url := azurePortalUrl + "/#blade/HubsExtension/ArgQueryBlade/query/" + url.PathEscape(query.InterpolatedQuery)
frameWithLink := addConfigLinks(*frame, url) frameWithLink := AddConfigLinks(*frame, url)
if frameWithLink.Meta == nil { if frameWithLink.Meta == nil {
frameWithLink.Meta = &data.FrameMeta{} frameWithLink.Meta = &data.FrameMeta{}
} }
@ -194,7 +210,7 @@ func (e *AzureResourceGraphDatasource) executeQuery(ctx context.Context, query *
return dataResponse return dataResponse
} }
func addConfigLinks(frame data.Frame, dl string) data.Frame { func AddConfigLinks(frame data.Frame, dl string) data.Frame {
for i := range frame.Fields { for i := range frame.Fields {
if frame.Fields[i].Config == nil { if frame.Fields[i].Config == nil {
frame.Fields[i].Config = &data.FieldConfig{} frame.Fields[i].Config = &data.FieldConfig{}
@ -209,7 +225,7 @@ func addConfigLinks(frame data.Frame, dl string) data.Frame {
return frame return frame
} }
func (e *AzureResourceGraphDatasource) createRequest(ctx context.Context, dsInfo datasourceInfo, reqBody []byte, url string) (*http.Request, error) { func (e *AzureResourceGraphDatasource) createRequest(ctx context.Context, dsInfo types.DatasourceInfo, reqBody []byte, url string) (*http.Request, error) {
req, err := http.NewRequest(http.MethodPost, url, bytes.NewBuffer(reqBody)) req, err := http.NewRequest(http.MethodPost, url, bytes.NewBuffer(reqBody))
if err != nil { if err != nil {
azlog.Debug("Failed to create request", "error", err) azlog.Debug("Failed to create request", "error", err)
@ -250,7 +266,7 @@ func (e *AzureResourceGraphDatasource) unmarshalResponse(res *http.Response) (Az
return data, nil return data, nil
} }
func getAzurePortalUrl(azureCloud string) (string, error) { func GetAzurePortalUrl(azureCloud string) (string, error) {
switch azureCloud { switch azureCloud {
case setting.AzurePublic: case setting.AzurePublic:
return "https://portal.azure.com", nil return "https://portal.azure.com", nil

View File

@ -1,4 +1,4 @@
package azuremonitor package resourcegraph
import ( import (
"context" "context"
@ -14,6 +14,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -68,7 +69,7 @@ func TestBuildingAzureResourceGraphQueries(t *testing.T) {
for _, tt := range tests { for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) { t.Run(tt.name, func(t *testing.T) {
queries, err := datasource.buildQueries(tt.queryModel, datasourceInfo{}) queries, err := datasource.buildQueries(tt.queryModel, types.DatasourceInfo{})
tt.Err(t, err) tt.Err(t, err)
if diff := cmp.Diff(tt.azureResourceGraphQueries, queries, cmpopts.IgnoreUnexported(simplejson.Json{})); diff != "" { if diff := cmp.Diff(tt.azureResourceGraphQueries, queries, cmpopts.IgnoreUnexported(simplejson.Json{})); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff) t.Errorf("Result mismatch (-want +got):\n%s", diff)
@ -80,7 +81,7 @@ func TestBuildingAzureResourceGraphQueries(t *testing.T) {
func TestAzureResourceGraphCreateRequest(t *testing.T) { func TestAzureResourceGraphCreateRequest(t *testing.T) {
ctx := context.Background() ctx := context.Background()
url := "http://ds" url := "http://ds"
dsInfo := datasourceInfo{} dsInfo := types.DatasourceInfo{}
tests := []struct { tests := []struct {
name string name string
@ -120,7 +121,7 @@ func TestAddConfigData(t *testing.T) {
frame := data.Frame{ frame := data.Frame{
Fields: []*data.Field{&field}, Fields: []*data.Field{&field},
} }
frameWithLink := addConfigLinks(frame, "http://ds") frameWithLink := AddConfigLinks(frame, "http://ds")
expectedFrameWithLink := data.Frame{ expectedFrameWithLink := data.Frame{
Fields: []*data.Field{ Fields: []*data.Field{
{ {
@ -145,7 +146,7 @@ func TestGetAzurePortalUrl(t *testing.T) {
} }
for _, cloud := range clouds { for _, cloud := range clouds {
azurePortalUrl, err := getAzurePortalUrl(cloud) azurePortalUrl, err := GetAzurePortalUrl(cloud)
if err != nil { if err != nil {
t.Errorf("The cloud not supported") t.Errorf("The cloud not supported")
} }

View File

@ -1,71 +1,55 @@
package azuremonitor package azuremonitor
import "github.com/grafana/grafana/pkg/setting" import (
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/deprecated"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)
// Azure cloud query types // Azure cloud query types
const ( const (
azureMonitor = "Azure Monitor" azureMonitor = "Azure Monitor"
appInsights = "Application Insights"
azureLogAnalytics = "Azure Log Analytics" azureLogAnalytics = "Azure Log Analytics"
insightsAnalytics = "Insights Analytics"
azureResourceGraph = "Azure Resource Graph" azureResourceGraph = "Azure Resource Graph"
) )
type azRoute struct { var azManagement = types.AzRoute{
URL string
Scopes []string
Headers map[string]string
}
var azManagement = azRoute{
URL: "https://management.azure.com", URL: "https://management.azure.com",
Scopes: []string{"https://management.azure.com/.default"}, Scopes: []string{"https://management.azure.com/.default"},
Headers: map[string]string{"x-ms-app": "Grafana"}, Headers: map[string]string{"x-ms-app": "Grafana"},
} }
var azUSGovManagement = azRoute{ var azUSGovManagement = types.AzRoute{
URL: "https://management.usgovcloudapi.net", URL: "https://management.usgovcloudapi.net",
Scopes: []string{"https://management.usgovcloudapi.net/.default"}, Scopes: []string{"https://management.usgovcloudapi.net/.default"},
Headers: map[string]string{"x-ms-app": "Grafana"}, Headers: map[string]string{"x-ms-app": "Grafana"},
} }
var azGermanyManagement = azRoute{ var azGermanyManagement = types.AzRoute{
URL: "https://management.microsoftazure.de", URL: "https://management.microsoftazure.de",
Scopes: []string{"https://management.microsoftazure.de/.default"}, Scopes: []string{"https://management.microsoftazure.de/.default"},
Headers: map[string]string{"x-ms-app": "Grafana"}, Headers: map[string]string{"x-ms-app": "Grafana"},
} }
var azChinaManagement = azRoute{ var azChinaManagement = types.AzRoute{
URL: "https://management.chinacloudapi.cn", URL: "https://management.chinacloudapi.cn",
Scopes: []string{"https://management.chinacloudapi.cn/.default"}, Scopes: []string{"https://management.chinacloudapi.cn/.default"},
Headers: map[string]string{"x-ms-app": "Grafana"}, Headers: map[string]string{"x-ms-app": "Grafana"},
} }
var azAppInsights = azRoute{ var azLogAnalytics = types.AzRoute{
URL: "https://api.applicationinsights.io",
Scopes: []string{},
Headers: map[string]string{"x-ms-app": "Grafana"},
}
var azChinaAppInsights = azRoute{
URL: "https://api.applicationinsights.azure.cn",
Scopes: []string{},
Headers: map[string]string{"x-ms-app": "Grafana"},
}
var azLogAnalytics = azRoute{
URL: "https://api.loganalytics.io", URL: "https://api.loganalytics.io",
Scopes: []string{"https://api.loganalytics.io/.default"}, Scopes: []string{"https://api.loganalytics.io/.default"},
Headers: map[string]string{"x-ms-app": "Grafana", "Cache-Control": "public, max-age=60"}, Headers: map[string]string{"x-ms-app": "Grafana", "Cache-Control": "public, max-age=60"},
} }
var azChinaLogAnalytics = azRoute{ var azChinaLogAnalytics = types.AzRoute{
URL: "https://api.loganalytics.azure.cn", URL: "https://api.loganalytics.azure.cn",
Scopes: []string{"https://api.loganalytics.azure.cn/.default"}, Scopes: []string{"https://api.loganalytics.azure.cn/.default"},
Headers: map[string]string{"x-ms-app": "Grafana", "Cache-Control": "public, max-age=60"}, Headers: map[string]string{"x-ms-app": "Grafana", "Cache-Control": "public, max-age=60"},
} }
var azUSGovLogAnalytics = azRoute{ var azUSGovLogAnalytics = types.AzRoute{
URL: "https://api.loganalytics.us", URL: "https://api.loganalytics.us",
Scopes: []string{"https://api.loganalytics.us/.default"}, Scopes: []string{"https://api.loganalytics.us/.default"},
Headers: map[string]string{"x-ms-app": "Grafana", "Cache-Control": "public, max-age=60"}, Headers: map[string]string{"x-ms-app": "Grafana", "Cache-Control": "public, max-age=60"},
@ -74,13 +58,13 @@ var azUSGovLogAnalytics = azRoute{
var ( var (
// The different Azure routes are identified by its cloud (e.g. public or gov) // The different Azure routes are identified by its cloud (e.g. public or gov)
// and the service to query (e.g. Azure Monitor or Azure Log Analytics) // and the service to query (e.g. Azure Monitor or Azure Log Analytics)
routes = map[string]map[string]azRoute{ routes = map[string]map[string]types.AzRoute{
setting.AzurePublic: { setting.AzurePublic: {
azureMonitor: azManagement, azureMonitor: azManagement,
azureLogAnalytics: azLogAnalytics, azureLogAnalytics: azLogAnalytics,
azureResourceGraph: azManagement, azureResourceGraph: azManagement,
appInsights: azAppInsights, deprecated.AppInsights: deprecated.AzAppInsights,
insightsAnalytics: azAppInsights, deprecated.InsightsAnalytics: deprecated.AzAppInsights,
}, },
setting.AzureUSGovernment: { setting.AzureUSGovernment: {
azureMonitor: azUSGovManagement, azureMonitor: azUSGovManagement,
@ -94,8 +78,8 @@ var (
azureMonitor: azChinaManagement, azureMonitor: azChinaManagement,
azureLogAnalytics: azChinaLogAnalytics, azureLogAnalytics: azChinaLogAnalytics,
azureResourceGraph: azChinaManagement, azureResourceGraph: azChinaManagement,
appInsights: azChinaAppInsights, deprecated.AppInsights: deprecated.AzChinaAppInsights,
insightsAnalytics: azChinaAppInsights, deprecated.InsightsAnalytics: deprecated.AzChinaAppInsights,
}, },
} }
) )

View File

@ -1,10 +1,15 @@
package azuremonitor package time
// setAutoTimeGrain tries to find the closest interval to the query's intervalMs value var (
// 1m, 5m, 15m, 30m, 1h, 6h, 12h, 1d in milliseconds
defaultAllowedIntervalsMS = []int64{60000, 300000, 900000, 1800000, 3600000, 21600000, 43200000, 86400000}
)
// SetAutoTimeGrain tries to find the closest interval to the query's intervalMs value
// if the metric has a limited set of possible intervals/time grains then use those // if the metric has a limited set of possible intervals/time grains then use those
// instead of the default list of intervals // instead of the default list of intervals
func setAutoTimeGrain(intervalMs int64, timeGrains []int64) (string, error) { func SetAutoTimeGrain(intervalMs int64, timeGrains []int64) (string, error) {
autoInterval := findClosestAllowedIntervalMS(intervalMs, timeGrains) autoInterval := FindClosestAllowedIntervalMS(intervalMs, timeGrains)
tg := &TimeGrain{} tg := &TimeGrain{}
autoTimeGrain, err := tg.createISO8601DurationFromIntervalMS(autoInterval) autoTimeGrain, err := tg.createISO8601DurationFromIntervalMS(autoInterval)
if err != nil { if err != nil {
@ -14,12 +19,12 @@ func setAutoTimeGrain(intervalMs int64, timeGrains []int64) (string, error) {
return autoTimeGrain, nil return autoTimeGrain, nil
} }
// findClosestAllowedIntervalMs is used for the auto time grain setting. // FindClosestAllowedIntervalMS is used for the auto time grain setting.
// It finds the closest time grain from the list of allowed time grains for Azure Monitor // It finds the closest time grain from the list of allowed time grains for Azure Monitor
// using the Grafana interval in milliseconds // using the Grafana interval in milliseconds
// Some metrics only allow a limited list of time grains. The allowedTimeGrains parameter // Some metrics only allow a limited list of time grains. The allowedTimeGrains parameter
// allows overriding the default list of allowed time grains. // allows overriding the default list of allowed time grains.
func findClosestAllowedIntervalMS(intervalMs int64, allowedTimeGrains []int64) int64 { func FindClosestAllowedIntervalMS(intervalMs int64, allowedTimeGrains []int64) int64 {
allowedIntervals := defaultAllowedIntervalsMS allowedIntervals := defaultAllowedIntervalsMS
if len(allowedTimeGrains) > 0 { if len(allowedTimeGrains) > 0 {

View File

@ -1,4 +1,4 @@
package azuremonitor package time
import ( import (
"fmt" "fmt"

View File

@ -1,4 +1,4 @@
package azuremonitor package time
import ( import (
"testing" "testing"

View File

@ -1,15 +1,54 @@
package azuremonitor package types
import ( import (
"encoding/json"
"fmt" "fmt"
"net/http"
"net/url" "net/url"
"strings" "regexp"
"time" "time"
"github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/azcredentials"
) )
const (
TimeSeries = "time_series"
)
var (
LegendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
)
type AzRoute struct {
URL string
Scopes []string
Headers map[string]string
}
type AzureMonitorSettings struct {
SubscriptionId string `json:"subscriptionId"`
LogAnalyticsDefaultWorkspace string `json:"logAnalyticsDefaultWorkspace"`
AppInsightsAppId string `json:"appInsightsAppId"`
}
type DatasourceService struct {
URL string
HTTPClient *http.Client
}
type DatasourceInfo struct {
Cloud string
Credentials azcredentials.AzureCredentials
Settings AzureMonitorSettings
Routes map[string]AzRoute
Services map[string]DatasourceService
JSONData map[string]interface{}
DecryptedSecureJSONData map[string]string
DatasourceID int64
OrgID int64
}
// AzureMonitorQuery is the query for all the services as they have similar queries // AzureMonitorQuery is the query for all the services as they have similar queries
// with a url, a querystring and an alias field // with a url, a querystring and an alias field
type AzureMonitorQuery struct { type AzureMonitorQuery struct {
@ -57,16 +96,6 @@ type AzureMonitorResponse struct {
Resourceregion string `json:"resourceregion"` Resourceregion string `json:"resourceregion"`
} }
// AzureLogAnalyticsResponse is the json response object from the Azure Log Analytics API.
type AzureLogAnalyticsResponse struct {
Tables []AzureResponseTable `json:"tables"`
}
// AzureResourceGraphResponse is the json response object from the Azure Resource Graph Analytics API.
type AzureResourceGraphResponse struct {
Data AzureResponseTable `json:"data"`
}
// AzureResponseTable is the table format for Azure responses // AzureResponseTable is the table format for Azure responses
type AzureResponseTable struct { type AzureResponseTable struct {
Name string `json:"name"` Name string `json:"name"`
@ -77,8 +106,8 @@ type AzureResponseTable struct {
Rows [][]interface{} `json:"rows"` Rows [][]interface{} `json:"rows"`
} }
// azureMonitorJSONQuery is the frontend JSON query model for an Azure Monitor query. // AzureMonitorJSONQuery is the frontend JSON query model for an Azure Monitor query.
type azureMonitorJSONQuery struct { type AzureMonitorJSONQuery struct {
AzureMonitor struct { AzureMonitor struct {
Aggregation string `json:"aggregation"` Aggregation string `json:"aggregation"`
Alias string `json:"alias"` Alias string `json:"alias"`
@ -94,20 +123,20 @@ type azureMonitorJSONQuery struct {
TimeGrain string `json:"timeGrain"` TimeGrain string `json:"timeGrain"`
Top string `json:"top"` Top string `json:"top"`
DimensionFilters []azureMonitorDimensionFilter `json:"dimensionFilters"` // new model DimensionFilters []AzureMonitorDimensionFilter `json:"dimensionFilters"` // new model
} `json:"azureMonitor"` } `json:"azureMonitor"`
Subscription string `json:"subscription"` Subscription string `json:"subscription"`
} }
// azureMonitorDimensionFilter is the model for the frontend sent for azureMonitor metric // AzureMonitorDimensionFilter is the model for the frontend sent for azureMonitor metric
// queries like "BlobType", "eq", "*" // queries like "BlobType", "eq", "*"
type azureMonitorDimensionFilter struct { type AzureMonitorDimensionFilter struct {
Dimension string `json:"dimension"` Dimension string `json:"dimension"`
Operator string `json:"operator"` Operator string `json:"operator"`
Filter string `json:"filter"` Filter string `json:"filter"`
} }
func (a azureMonitorDimensionFilter) String() string { func (a AzureMonitorDimensionFilter) String() string {
filter := "*" filter := "*"
if a.Filter != "" { if a.Filter != "" {
filter = a.Filter filter = a.Filter
@ -115,29 +144,8 @@ func (a azureMonitorDimensionFilter) String() string {
return fmt.Sprintf("%v %v '%v'", a.Dimension, a.Operator, filter) return fmt.Sprintf("%v %v '%v'", a.Dimension, a.Operator, filter)
} }
// insightsJSONQuery is the frontend JSON query model for an Azure Application Insights query. // LogJSONQuery is the frontend JSON query model for an Azure Log Analytics query.
type insightsJSONQuery struct { type LogJSONQuery struct {
AppInsights struct {
Aggregation string `json:"aggregation"`
Alias string `json:"alias"`
AllowedTimeGrainsMs []int64 `json:"allowedTimeGrainsMs"`
Dimensions InsightsDimensions `json:"dimension"`
DimensionFilter string `json:"dimensionFilter"`
MetricName string `json:"metricName"`
TimeGrain string `json:"timeGrain"`
} `json:"appInsights"`
Raw *bool `json:"raw"`
}
type insightsAnalyticsJSONQuery struct {
InsightsAnalytics struct {
Query string `json:"query"`
ResultFormat string `json:"resultFormat"`
} `json:"insightsAnalytics"`
}
// logJSONQuery is the frontend JSON query model for an Azure Log Analytics query.
type logJSONQuery struct {
AzureLogAnalytics struct { AzureLogAnalytics struct {
Query string `json:"query"` Query string `json:"query"`
ResultFormat string `json:"resultFormat"` ResultFormat string `json:"resultFormat"`
@ -148,69 +156,22 @@ type logJSONQuery struct {
} `json:"azureLogAnalytics"` } `json:"azureLogAnalytics"`
} }
type argJSONQuery struct { // MetricChartDefinition is the JSON model for a metrics chart definition
AzureResourceGraph struct { type MetricChartDefinition struct {
Query string `json:"query"`
ResultFormat string `json:"resultFormat"`
} `json:"azureResourceGraph"`
}
// metricChartDefinition is the JSON model for a metrics chart definition
type metricChartDefinition struct {
ResourceMetadata map[string]string `json:"resourceMetadata"` ResourceMetadata map[string]string `json:"resourceMetadata"`
Name string `json:"name"` Name string `json:"name"`
AggregationType int `json:"aggregationType"` AggregationType int `json:"aggregationType"`
Namespace string `json:"namespace"` Namespace string `json:"namespace"`
MetricVisualization metricVisualization `json:"metricVisualization"` MetricVisualization MetricVisualization `json:"metricVisualization"`
} }
// metricVisualization is the JSON model for the visualization field of a // MetricVisualization is the JSON model for the visualization field of a
// metricChartDefinition // metricChartDefinition
type metricVisualization struct { type MetricVisualization struct {
DisplayName string `json:"displayName"` DisplayName string `json:"displayName"`
ResourceDisplayName string `json:"resourceDisplayName"` ResourceDisplayName string `json:"resourceDisplayName"`
} }
// InsightsDimensions will unmarshal from a JSON string, or an array of strings, type ServiceProxy interface {
// into a string array. This exists to support an older query format which is updated Do(rw http.ResponseWriter, req *http.Request, cli *http.Client) http.ResponseWriter
// when a user saves the query or it is sent from the front end, but may not be when
// alerting fetches the model.
type InsightsDimensions []string
// UnmarshalJSON fulfills the json.Unmarshaler interface type.
func (s *InsightsDimensions) UnmarshalJSON(data []byte) error {
*s = InsightsDimensions{}
if string(data) == "null" || string(data) == "" {
return nil
}
if strings.ToLower(string(data)) == `"none"` {
return nil
}
if data[0] == '[' {
var sa []string
err := json.Unmarshal(data, &sa)
if err != nil {
return err
}
dimensions := []string{}
for _, v := range sa {
if v == "none" || v == "None" {
continue
}
dimensions = append(dimensions, v)
}
*s = InsightsDimensions(dimensions)
return nil
}
var str string
err := json.Unmarshal(data, &str)
if err != nil {
return fmt.Errorf("could not parse %q as string or array: %w", string(data), err)
}
if str != "" {
*s = InsightsDimensions{str}
return nil
}
return nil
} }